Merge branch 'main' into quieter_self_connection_logging

This commit is contained in:
Kyle Altendorf 2023-03-29 22:06:21 -04:00
commit e376f82860
No known key found for this signature in database
81 changed files with 1529 additions and 1011 deletions

View File

@ -63,6 +63,7 @@ jobs:
matrix: '3.7'
exclude_from:
limited: True
main: True
- name: '3.8'
file_name: '3.8'
action: '3.8'
@ -71,6 +72,7 @@ jobs:
matrix: '3.8'
exclude_from:
limited: True
main: True
- name: '3.9'
file_name: '3.9'
action: '3.9'
@ -85,6 +87,7 @@ jobs:
matrix: '3.10'
exclude_from:
limited: True
main: True
- name: '3.11'
file_name: '3.11'
action: '3.11'
@ -93,6 +96,7 @@ jobs:
matrix: '3.11'
exclude_from:
limited: True
main: True
exclude:
- os:
matrix: macos

View File

@ -41,7 +41,7 @@ jobs:
python tests/build-job-matrix.py --per directory --verbose > matrix.json
cat matrix.json
echo configuration=$(cat matrix.json) >> $GITHUB_OUTPUT
echo matrix_mode=${{ ( github.repository_owner == 'Chia-Network' && github.repository != 'Chia-Network/chia-blockchain' ) && 'limited' || 'all' }} >> $GITHUB_OUTPUT
echo matrix_mode=${{ ( github.event_name == 'workflow_dispatch' ) && 'all' || ( github.repository_owner == 'Chia-Network' && github.repository != 'Chia-Network/chia-blockchain' ) && 'limited' || ( github.repository_owner == 'Chia-Network' && github.repository == 'Chia-Network/chia-blockchain' && github.ref == 'refs/heads/main' ) && 'main' || ( github.repository_owner == 'Chia-Network' && github.repository == 'Chia-Network/chia-blockchain' && startsWith(github.ref, 'refs/heads/release/') ) && 'all' || 'main' }} >> $GITHUB_OUTPUT
outputs:
configuration: ${{ steps.configure.outputs.configuration }}

View File

@ -6,6 +6,53 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project does not yet adhere to [Semantic Versioning](https://semver.org/spec/v2.0.0.html)
for setuptools_scm/PEP 440 reasons.
## 1.7.1 Chia blockchain 2023-03-22
### Added
- `get_transaction_memo` wallet RPC
- `set_wallet_resync_on_startup` wallet RPC to reset wallet sync data on wallet restart
- `nft_count_nfts` wallet RPC - counts NFTs per wallet or for all wallets
- Community DNS introducers to initial (default) config.yaml
- additional metrics for `state_changed` events (used by chia-exporter)
- Python 3.11 support
- `chia wallet check` CLI command
- `reuse_public_key_for_change` config.yaml option to allow address reuse for change
- `nft_id` added to the JSON output of all NFT RPCs
- `curry` Chialisp library replaces `curry-and-treehash`
### Changed
- `chia show -f` changed to output proper JSON
- `Rate limiting` log messages are themselves rate limited
- Notified GUI when wallets are removed
- Optimized counting of NFTs during removal by leveraging SQLite
- Offer CLI command help now shows `--fee` units as XCH
- Optimized offer code by limiting `additions` recomputation
- `chia_rs` updated to 0.2.4
- Improve the help text for the `chia peer` and `chia peer -a` commands
- Remove redundant checks for minting coin and reserve fee
- `nft_get_nfts` performance improvements by shifting paging to SQLite
- `did_find_lost_did` improved
- Extend the sign APIs to support hex strings
- Changed mempool backend to use an in-memory SQLite DB
### Fixed
- Quieted wallet log output for `Record: ... not in mempool` (fixes #14452)
- Quieted log output for `AttributeError: 'NoneType' object has no attribute '_get_extra_info'`
- Reduced log output for `Using previous generator for height`
- Fixed error message when the `coin_id` argument for `nft_get_info` cannot be decoded
- Reduced cases where wallet claims to be synced while still syncing
- Resolved unnecessary error logging caused by unhinted coins (see #14757)
- Avoid `Unclosed client session` errors and associated tracebacks when using Ctrl-c during CLI commands
- Avoid `AttributeError` when shutting down plotting
- Added `anyio` as a dependency
- Resolved issues when offers involve small amounts and royalties round down to zero (fixes #14744)
- Limit retries to 3 for submitting offer transactions to the mempool and improved handling of offer status (fixes #14714)
- Resolved issues with offers stuck as pending when multiple people accept the same offer (fixes #14621)
- Improved the accuracy of the wallet sync status indication
### Deprecated
- `curry-and-treehash` Chialisp library replaced by new `curry` library
## 1.7.0 Chia blockchain 2023-02-15
### Added

@ -1 +1 @@
Subproject commit 91234eedaa3fd515699f4d7e38910b5314306cb4
Subproject commit a66e66eb873a9eeea1d234c0d06c47ef6303bc5a

View File

@ -65,7 +65,7 @@ class CostLogger:
def add_cost(self, descriptor: str, spend_bundle: SpendBundle) -> SpendBundle:
program: BlockGenerator = simple_solution_generator(spend_bundle)
npc_result: NPCResult = get_name_puzzle_conditions(
program, INFINITE_COST, cost_per_byte=DEFAULT_CONSTANTS.COST_PER_BYTE, mempool_mode=True
program, INFINITE_COST, mempool_mode=True, height=DEFAULT_CONSTANTS.SOFT_FORK2_HEIGHT
)
self.cost_dict[descriptor] = npc_result.cost
cost_to_subtract: int = 0

View File

@ -91,7 +91,7 @@ def verify_passphrase_meets_requirements(
def prompt_for_passphrase(prompt: str) -> str:
if sys.platform == "win32" or sys.platform == "cygwin":
print(prompt, end="")
print(prompt, end="", flush=True)
prompt = ""
return getpass(prompt)

View File

@ -59,6 +59,7 @@ async def validate_block_body(
if isinstance(block, FullBlock):
assert height == block.height
prev_transaction_block_height: uint32 = uint32(0)
prev_transaction_block_timestamp: uint64 = uint64(0)
# 1. For non transaction-blocs: foliage block, transaction filter, transactions info, and generator must
# be empty. If it is a block but not a transaction block, there is no body to validate. Check that all fields are
@ -103,6 +104,8 @@ async def validate_block_body(
# Add reward claims for all blocks from the prev prev block, until the prev block (including the latter)
prev_transaction_block = blocks.block_record(block.foliage_transaction_block.prev_transaction_block_hash)
prev_transaction_block_height = prev_transaction_block.height
assert prev_transaction_block.timestamp
prev_transaction_block_timestamp = prev_transaction_block.timestamp
assert prev_transaction_block.fees is not None
pool_coin = create_pool_coin(
prev_transaction_block_height,
@ -316,7 +319,6 @@ async def validate_block_body(
curr_npc_result = get_name_puzzle_conditions(
curr_block_generator,
min(constants.MAX_BLOCK_COST_CLVM, curr.transactions_info.cost),
cost_per_byte=constants.COST_PER_BYTE,
mempool_mode=False,
height=curr.height,
constants=constants,
@ -457,11 +459,18 @@ async def validate_block_body(
# verify absolute/relative height/time conditions
if npc_result is not None:
assert npc_result.conds is not None
block_timestamp: uint64
if height < constants.SOFT_FORK2_HEIGHT:
block_timestamp = block.foliage_transaction_block.timestamp
else:
block_timestamp = prev_transaction_block_timestamp
error = mempool_check_time_locks(
removal_coin_records,
npc_result.conds,
prev_transaction_block_height,
block.foliage_transaction_block.timestamp,
block_timestamp,
)
if error:
return error, None

View File

@ -131,10 +131,7 @@ def create_foliage(
if block_generator is not None:
generator_block_heights_list = block_generator.block_height_list
result: NPCResult = get_name_puzzle_conditions(
block_generator,
constants.MAX_BLOCK_COST_CLVM,
cost_per_byte=constants.COST_PER_BYTE,
mempool_mode=True,
block_generator, constants.MAX_BLOCK_COST_CLVM, mempool_mode=True, height=height
)
cost = result.cost

View File

@ -54,9 +54,9 @@ from chia.util.setproctitle import getproctitle, setproctitle
log = logging.getLogger(__name__)
class ReceiveBlockResult(Enum):
class AddBlockResult(Enum):
"""
When Blockchain.receive_block(b) is called, one of these results is returned,
When Blockchain.add_block(b) is called, one of these results is returned,
showing whether the block was added to the chain (extending the peak),
and if not, why it was not added.
"""
@ -193,12 +193,12 @@ class Blockchain(BlockchainInterface):
async def get_full_block(self, header_hash: bytes32) -> Optional[FullBlock]:
return await self.block_store.get_full_block(header_hash)
async def receive_block(
async def add_block(
self,
block: FullBlock,
pre_validation_result: PreValidationResult,
fork_point_with_peak: Optional[uint32] = None,
) -> Tuple[ReceiveBlockResult, Optional[Err], Optional[StateChangeSummary]]:
) -> Tuple[AddBlockResult, Optional[Err], Optional[StateChangeSummary]]:
"""
This method must be called under the blockchain lock
Adds a new block into the blockchain, if it's valid and connected to the current
@ -223,18 +223,18 @@ class Blockchain(BlockchainInterface):
genesis: bool = block.height == 0
if self.contains_block(block.header_hash):
return ReceiveBlockResult.ALREADY_HAVE_BLOCK, None, None
return AddBlockResult.ALREADY_HAVE_BLOCK, None, None
if not self.contains_block(block.prev_header_hash) and not genesis:
return ReceiveBlockResult.DISCONNECTED_BLOCK, Err.INVALID_PREV_BLOCK_HASH, None
return AddBlockResult.DISCONNECTED_BLOCK, Err.INVALID_PREV_BLOCK_HASH, None
if not genesis and (self.block_record(block.prev_header_hash).height + 1) != block.height:
return ReceiveBlockResult.INVALID_BLOCK, Err.INVALID_HEIGHT, None
return AddBlockResult.INVALID_BLOCK, Err.INVALID_HEIGHT, None
npc_result: Optional[NPCResult] = pre_validation_result.npc_result
required_iters = pre_validation_result.required_iters
if pre_validation_result.error is not None:
return ReceiveBlockResult.INVALID_BLOCK, Err(pre_validation_result.error), None
return AddBlockResult.INVALID_BLOCK, Err(pre_validation_result.error), None
assert required_iters is not None
error_code, _ = await validate_block_body(
@ -252,7 +252,7 @@ class Blockchain(BlockchainInterface):
validate_signature=not pre_validation_result.validated_signature,
)
if error_code is not None:
return ReceiveBlockResult.INVALID_BLOCK, error_code, None
return AddBlockResult.INVALID_BLOCK, error_code, None
block_record = block_to_block_record(
self.constants,
@ -300,9 +300,9 @@ class Blockchain(BlockchainInterface):
if state_change_summary is not None:
# new coin records added
return ReceiveBlockResult.NEW_PEAK, None, state_change_summary
return AddBlockResult.NEW_PEAK, None, state_change_summary
else:
return ReceiveBlockResult.ADDED_AS_ORPHAN, None, None
return AddBlockResult.ADDED_AS_ORPHAN, None, None
async def _reconsider_peak(
self,
@ -438,11 +438,7 @@ class Blockchain(BlockchainInterface):
block_generator: Optional[BlockGenerator] = await self.get_block_generator(block)
assert block_generator is not None
npc_result = get_name_puzzle_conditions(
block_generator,
self.constants.MAX_BLOCK_COST_CLVM,
cost_per_byte=self.constants.COST_PER_BYTE,
mempool_mode=False,
height=block.height,
block_generator, self.constants.MAX_BLOCK_COST_CLVM, mempool_mode=False, height=block.height
)
tx_removals, tx_additions = tx_removals_and_additions(npc_result.conds)
return tx_removals, tx_additions, npc_result
@ -539,6 +535,9 @@ class Blockchain(BlockchainInterface):
async def validate_unfinished_block_header(
self, block: UnfinishedBlock, skip_overflow_ss_validation: bool = True
) -> Tuple[Optional[uint64], Optional[Err]]:
if len(block.transactions_generator_ref_list) > self.constants.MAX_GENERATOR_REF_LIST_SIZE:
return None, Err.TOO_MANY_GENERATOR_REFS
if (
not self.contains_block(block.prev_header_hash)
and block.prev_header_hash != self.constants.GENESIS_CHALLENGE

View File

@ -56,8 +56,7 @@ default_kwargs = {
"MAX_GENERATOR_REF_LIST_SIZE": 512, # Number of references allowed in the block generator ref list
"POOL_SUB_SLOT_ITERS": 37600000000, # iters limit * NUM_SPS
"SOFT_FORK_HEIGHT": 3630000,
# the soft-fork 2 is disabled (for now)
"SOFT_FORK2_HEIGHT": 3830000,
"SOFT_FORK2_HEIGHT": 4000000,
}

View File

@ -91,7 +91,6 @@ def batch_pre_validate_blocks(
npc_result = get_name_puzzle_conditions(
block_generator,
min(constants.MAX_BLOCK_COST_CLVM, block.transactions_info.cost),
cost_per_byte=constants.COST_PER_BYTE,
mempool_mode=False,
height=block.height,
constants=constants,
@ -119,7 +118,7 @@ def batch_pre_validate_blocks(
if error_int is None:
# If this is False, it means either we don't have a signature (not a tx block) or we have an invalid
# signature (which also puts in an error) or we didn't validate the signature because we want to
# validate it later. receive_block will attempt to validate the signature later.
# validate it later. add_block will attempt to validate the signature later.
if validate_signatures:
if npc_result is not None and block.transactions_info is not None:
assert npc_result.conds
@ -387,7 +386,6 @@ def _run_generator(
npc_result: NPCResult = get_name_puzzle_conditions(
block_generator,
min(constants.MAX_BLOCK_COST_CLVM, unfinished_block.transactions_info.cost),
cost_per_byte=constants.COST_PER_BYTE,
mempool_mode=False,
height=height,
)

View File

@ -279,7 +279,7 @@ class WebSocketServer:
return ws
async def send_all_responses(self, connections: Set[WebSocketResponse], response: str) -> None:
for connection in connections:
for connection in connections.copy():
try:
await connection.send_str(response)
except Exception as e:

View File

@ -131,8 +131,8 @@ class DataLayerWallet:
return self
@classmethod
def type(cls) -> uint8:
return uint8(WalletType.DATA_LAYER)
def type(cls) -> WalletType:
return WalletType.DATA_LAYER
def id(self) -> uint32:
return self.wallet_info.id
@ -154,7 +154,7 @@ class DataLayerWallet:
self.standard_wallet = wallet
for _, w in self.wallet_state_manager.wallets.items():
if w.type() == uint8(WalletType.DATA_LAYER):
if w.type() == WalletType.DATA_LAYER:
raise ValueError("DataLayer Wallet already exists for this key")
assert name is not None
@ -163,9 +163,7 @@ class DataLayerWallet:
WalletType.DATA_LAYER.value,
"",
)
self.wallet_id = uint8(self.wallet_info.id)
await self.wallet_state_manager.add_new_wallet(self, self.wallet_info.id)
await self.wallet_state_manager.add_new_wallet(self)
return self

View File

@ -17,7 +17,7 @@ from blspy import AugSchemeMPL
from chia.consensus.block_creation import unfinished_block_to_full_block
from chia.consensus.block_record import BlockRecord
from chia.consensus.blockchain import Blockchain, ReceiveBlockResult, StateChangeSummary
from chia.consensus.blockchain import AddBlockResult, Blockchain, StateChangeSummary
from chia.consensus.blockchain_interface import BlockchainInterface
from chia.consensus.constants import ConsensusConstants
from chia.consensus.cost_calculator import NPCResult
@ -1256,11 +1256,11 @@ class FullNode:
assert pre_validation_results[i].required_iters is not None
state_change_summary: Optional[StateChangeSummary]
advanced_peak = agg_state_change_summary is not None
result, error, state_change_summary = await self.blockchain.receive_block(
result, error, state_change_summary = await self.blockchain.add_block(
block, pre_validation_results[i], None if advanced_peak else fork_point
)
if result == ReceiveBlockResult.NEW_PEAK:
if result == AddBlockResult.NEW_PEAK:
assert state_change_summary is not None
# Since all blocks are contiguous, we can simply append the rollback changes and npc results
if agg_state_change_summary is None:
@ -1275,7 +1275,7 @@ class FullNode:
agg_state_change_summary.new_npc_results + state_change_summary.new_npc_results,
agg_state_change_summary.new_rewards + state_change_summary.new_rewards,
)
elif result == ReceiveBlockResult.INVALID_BLOCK or result == ReceiveBlockResult.DISCONNECTED_BLOCK:
elif result == AddBlockResult.INVALID_BLOCK or result == AddBlockResult.DISCONNECTED_BLOCK:
if error is not None:
self.log.error(f"Error: {error}, Invalid block from peer: {peer.get_peer_logging()} ")
return False, agg_state_change_summary
@ -1666,14 +1666,14 @@ class FullNode:
pre_validation_results = await self.blockchain.pre_validate_blocks_multiprocessing(
[block], npc_results, validate_signatures=False
)
added: Optional[ReceiveBlockResult] = None
added: Optional[AddBlockResult] = None
pre_validation_time = time.time() - validation_start
try:
if len(pre_validation_results) < 1:
raise ValueError(f"Failed to validate block {header_hash} height {block.height}")
if pre_validation_results[0].error is not None:
if Err(pre_validation_results[0].error) == Err.INVALID_PREV_BLOCK_HASH:
added = ReceiveBlockResult.DISCONNECTED_BLOCK
added = AddBlockResult.DISCONNECTED_BLOCK
error_code: Optional[Err] = Err.INVALID_PREV_BLOCK_HASH
else:
raise ValueError(
@ -1685,36 +1685,36 @@ class FullNode:
pre_validation_results[0] if pre_validation_result is None else pre_validation_result
)
assert result_to_validate.required_iters == pre_validation_results[0].required_iters
(added, error_code, state_change_summary) = await self.blockchain.receive_block(
(added, error_code, state_change_summary) = await self.blockchain.add_block(
block, result_to_validate, None
)
if added == ReceiveBlockResult.ALREADY_HAVE_BLOCK:
if added == AddBlockResult.ALREADY_HAVE_BLOCK:
return None
elif added == ReceiveBlockResult.INVALID_BLOCK:
elif added == AddBlockResult.INVALID_BLOCK:
assert error_code is not None
self.log.error(f"Block {header_hash} at height {block.height} is invalid with code {error_code}.")
raise ConsensusError(error_code, [header_hash])
elif added == ReceiveBlockResult.DISCONNECTED_BLOCK:
elif added == AddBlockResult.DISCONNECTED_BLOCK:
self.log.info(f"Disconnected block {header_hash} at height {block.height}")
if raise_on_disconnected:
raise RuntimeError("Expected block to be added, received disconnected block.")
return None
elif added == ReceiveBlockResult.NEW_PEAK:
elif added == AddBlockResult.NEW_PEAK:
# Only propagate blocks which extend the blockchain (becomes one of the heads)
assert state_change_summary is not None
ppp_result = await self.peak_post_processing(block, state_change_summary, peer)
elif added == ReceiveBlockResult.ADDED_AS_ORPHAN:
elif added == AddBlockResult.ADDED_AS_ORPHAN:
self.log.info(
f"Received orphan block of height {block.height} rh {block.reward_chain_block.get_hash()}"
)
else:
# Should never reach here, all the cases are covered
raise RuntimeError(f"Invalid result from receive_block {added}")
raise RuntimeError(f"Invalid result from add_block {added}")
except asyncio.CancelledError:
# We need to make sure to always call this method even when we get a cancel exception, to make sure
# the node stays in sync
if added == ReceiveBlockResult.NEW_PEAK:
if added == AddBlockResult.NEW_PEAK:
assert state_change_summary is not None
await self.peak_post_processing(block, state_change_summary, peer)
raise

View File

@ -106,7 +106,7 @@ class FullNodeAPI:
) -> Optional[Message]:
self.log.debug(f"Received {len(request.peer_list)} peers")
if self.full_node.full_node_peers is not None:
await self.full_node.full_node_peers.respond_peers(request, peer.get_peer_info(), True)
await self.full_node.full_node_peers.add_peers(request.peer_list, peer.get_peer_info(), True)
return None
@api_request(peer_required=True)
@ -115,7 +115,7 @@ class FullNodeAPI:
) -> Optional[Message]:
self.log.debug(f"Received {len(request.peer_list)} peers from introducer")
if self.full_node.full_node_peers is not None:
await self.full_node.full_node_peers.respond_peers(request, peer.get_peer_info(), False)
await self.full_node.full_node_peers.add_peers(request.peer_list, peer.get_peer_info(), False)
await peer.close()
return None
@ -1107,7 +1107,6 @@ class FullNodeAPI:
get_name_puzzle_conditions,
block_generator,
self.full_node.constants.MAX_BLOCK_COST_CLVM,
cost_per_byte=self.full_node.constants.COST_PER_BYTE,
mempool_mode=False,
height=request.height,
),

View File

@ -3,7 +3,6 @@ from __future__ import annotations
import asyncio
import dataclasses
import logging
import traceback
from types import TracebackType
from typing import Awaitable, Callable
@ -58,8 +57,7 @@ class LockQueue:
await prioritized_callback.af()
await self._release_event.wait()
except asyncio.CancelledError:
error_stack = traceback.format_exc()
log.debug(f"LockQueue._run() cancelled: {error_stack}")
log.debug("LockQueue._run() cancelled")
def close(self) -> None:
self._run_task.cancel()

View File

@ -29,6 +29,7 @@ class MempoolRemoveReason(Enum):
CONFLICT = 1
BLOCK_INCLUSION = 2
POOL_FULL = 3
EXPIRED = 4
@dataclass(frozen=True)
@ -53,7 +54,7 @@ class Mempool:
# assert_height may be NIL
generated = ""
if not SQLITE_NO_GENERATED_COLUMNS:
generated = " GENERATED ALWAYS AS (fee / cost) VIRTUAL"
generated = " GENERATED ALWAYS AS (CAST(fee AS REAL) / cost) VIRTUAL"
self._db_conn.execute(
f"""CREATE TABLE tx(
@ -61,12 +62,20 @@ class Mempool:
cost INT NOT NULL,
fee INT NOT NULL,
assert_height INT,
assert_before_height INT,
assert_before_seconds INT,
fee_per_cost REAL{generated})
"""
)
self._db_conn.execute("CREATE INDEX fee_sum ON tx(fee)")
self._db_conn.execute("CREATE INDEX cost_sum ON tx(cost)")
self._db_conn.execute("CREATE INDEX feerate ON tx(fee_per_cost)")
self._db_conn.execute(
"CREATE INDEX assert_before_height ON tx(assert_before_height) WHERE assert_before_height != NULL"
)
self._db_conn.execute(
"CREATE INDEX assert_before_seconds ON tx(assert_before_seconds) WHERE assert_before_seconds != NULL"
)
# This table maps coin IDs to spend bundles hashes
self._db_conn.execute(
@ -174,6 +183,21 @@ class Mempool:
else:
return 0
def new_tx_block(self, block_height: uint32, timestamp: uint64) -> None:
"""
Remove all items that became invalid because of this new height and
timestamp. (we don't know about which coins were spent in this new block
here, so those are handled separately)
"""
with self._db_conn:
cursor = self._db_conn.execute(
"SELECT name FROM tx WHERE assert_before_seconds <= ? OR assert_before_height <= ?",
(timestamp, block_height),
)
to_remove = [bytes32(row[0]) for row in cursor]
self.remove_from_pool(to_remove, MempoolRemoveReason.EXPIRED)
def remove_from_pool(self, items: List[bytes32], reason: MempoolRemoveReason) -> None:
"""
Removes an item from the mempool.
@ -232,12 +256,28 @@ class Mempool:
if SQLITE_NO_GENERATED_COLUMNS:
self._db_conn.execute(
"INSERT INTO tx VALUES(?, ?, ?, ?, ?)",
(item.name, item.cost, item.fee, item.assert_height, item.fee / item.cost),
"INSERT INTO tx VALUES(?, ?, ?, ?, ?, ?, ?)",
(
item.name,
item.cost,
item.fee,
item.assert_height,
item.assert_before_height,
item.assert_before_seconds,
item.fee / item.cost,
),
)
else:
self._db_conn.execute(
"INSERT INTO tx VALUES(?, ?, ?, ?)", (item.name, item.cost, item.fee, item.assert_height)
"INSERT INTO tx VALUES(?, ?, ?, ?, ?, ?)",
(
item.name,
item.cost,
item.fee,
item.assert_height,
item.assert_before_height,
item.assert_before_seconds,
),
)
all_coin_spends = [(s.coin_id, item.name) for s in item.npc_result.conds.spends]

View File

@ -3,7 +3,7 @@ from __future__ import annotations
import logging
from typing import Dict, List, Optional, Tuple
from chia_rs import LIMIT_STACK, MEMPOOL_MODE
from chia_rs import ENABLE_ASSERT_BEFORE, LIMIT_STACK, MEMPOOL_MODE, NO_RELATIVE_CONDITIONS_ON_EPHEMERAL
from chia_rs import get_puzzle_and_solution_for_coin as get_puzzle_and_solution_for_coin_rust
from chia_rs import run_block_generator, run_chia_program
from clvm.casts import int_from_bytes
@ -37,25 +37,20 @@ def get_name_puzzle_conditions(
generator: BlockGenerator,
max_cost: int,
*,
cost_per_byte: int,
mempool_mode: bool,
height: Optional[uint32] = None,
height: uint32,
constants: ConsensusConstants = DEFAULT_CONSTANTS,
) -> NPCResult:
# in mempool mode, the height doesn't matter, because it's always strict.
# But otherwise, height must be specified to know which rules to apply
assert mempool_mode or height is not None
if mempool_mode:
flags = MEMPOOL_MODE
elif height is not None and height >= constants.SOFT_FORK_HEIGHT:
elif height >= constants.SOFT_FORK_HEIGHT:
flags = LIMIT_STACK
else:
flags = 0
# soft-fork2 is disabled (for now)
# if height is not None and height >= constants.SOFT_FORK2_HEIGHT:
# flags = flags | ENABLE_ASSERT_BEFORE
if height >= constants.SOFT_FORK2_HEIGHT:
flags = flags | ENABLE_ASSERT_BEFORE | NO_RELATIVE_CONDITIONS_ON_EPHEMERAL
try:
block_args = [bytes(gen) for gen in generator.generator_refs]
@ -134,6 +129,12 @@ def mempool_check_time_locks(
return Err.ASSERT_HEIGHT_ABSOLUTE_FAILED
if timestamp < bundle_conds.seconds_absolute:
return Err.ASSERT_SECONDS_ABSOLUTE_FAILED
if bundle_conds.before_height_absolute is not None:
if prev_transaction_block_height >= bundle_conds.before_height_absolute:
return Err.ASSERT_BEFORE_HEIGHT_ABSOLUTE_FAILED
if bundle_conds.before_seconds_absolute is not None:
if timestamp >= bundle_conds.before_seconds_absolute:
return Err.ASSERT_BEFORE_SECONDS_ABSOLUTE_FAILED
for spend in bundle_conds.spends:
unspent = removal_coin_records[bytes32(spend.coin_id)]
@ -149,4 +150,11 @@ def mempool_check_time_locks(
if spend.seconds_relative is not None:
if timestamp < unspent.timestamp + spend.seconds_relative:
return Err.ASSERT_SECONDS_RELATIVE_FAILED
if spend.before_height_relative is not None:
if prev_transaction_block_height >= unspent.confirmed_block_index + spend.before_height_relative:
return Err.ASSERT_BEFORE_HEIGHT_RELATIVE_FAILED
if spend.before_seconds_relative is not None:
if timestamp >= unspent.timestamp + spend.before_seconds_relative:
return Err.ASSERT_BEFORE_SECONDS_RELATIVE_FAILED
return None

View File

@ -5,8 +5,9 @@ import logging
import time
from concurrent.futures import Executor
from concurrent.futures.process import ProcessPoolExecutor
from dataclasses import dataclass
from multiprocessing.context import BaseContext
from typing import Awaitable, Callable, Dict, List, Optional, Set, Tuple
from typing import Awaitable, Callable, Dict, List, Optional, Set, Tuple, TypeVar
from blspy import GTElement
from chiabip158 import PyBIP158
@ -59,7 +60,6 @@ def validate_clvm_and_signature(
the NPCResult and a cache of the new pairings validated (if not error)
"""
cost_per_byte = constants.COST_PER_BYTE
additional_data = constants.AGG_SIG_ME_ADDITIONAL_DATA
try:
@ -67,7 +67,7 @@ def validate_clvm_and_signature(
program = simple_solution_generator(bundle)
# npc contains names of the coins removed, puzzle_hashes and their spend conditions
result: NPCResult = get_name_puzzle_conditions(
program, max_cost, cost_per_byte=cost_per_byte, mempool_mode=True, constants=constants, height=height
program, max_cost, mempool_mode=True, constants=constants, height=height
)
if result.error is not None:
@ -93,25 +93,54 @@ def validate_clvm_and_signature(
return None, bytes(result), new_cache_entries
@dataclass
class TimelockConditions:
assert_height: uint32 = uint32(0)
assert_before_height: Optional[uint32] = None
assert_before_seconds: Optional[uint64] = None
def compute_assert_height(
removal_coin_records: Dict[bytes32, CoinRecord],
conds: SpendBundleConditions,
) -> uint32:
) -> TimelockConditions:
"""
Computes the most restrictive height assertion in the spend bundle. Relative
height assertions are resolved using the confirmed heights from the coin
records.
Computes the most restrictive height- and seconds assertion in the spend bundle.
Relative heights and times are resolved using the confirmed heights and
timestamps from the coin records.
"""
height: uint32 = uint32(conds.height_absolute)
ret = TimelockConditions()
ret.assert_height = uint32(conds.height_absolute)
ret.assert_before_height = (
uint32(conds.before_height_absolute) if conds.before_height_absolute is not None else None
)
ret.assert_before_seconds = (
uint64(conds.before_seconds_absolute) if conds.before_seconds_absolute is not None else None
)
for spend in conds.spends:
if spend.height_relative is None:
continue
h = uint32(removal_coin_records[bytes32(spend.coin_id)].confirmed_block_index + spend.height_relative)
height = max(height, h)
if spend.height_relative is not None:
h = uint32(removal_coin_records[bytes32(spend.coin_id)].confirmed_block_index + spend.height_relative)
ret.assert_height = max(ret.assert_height, h)
return height
if spend.before_height_relative is not None:
h = uint32(
removal_coin_records[bytes32(spend.coin_id)].confirmed_block_index + spend.before_height_relative
)
if ret.assert_before_height is not None:
ret.assert_before_height = min(ret.assert_before_height, h)
else:
ret.assert_before_height = h
if spend.before_seconds_relative is not None:
s = uint64(removal_coin_records[bytes32(spend.coin_id)].timestamp + spend.before_seconds_relative)
if ret.assert_before_seconds is not None:
ret.assert_before_seconds = min(ret.assert_before_seconds, s)
else:
ret.assert_before_seconds = s
return ret
class MempoolManager:
@ -472,23 +501,31 @@ class MempoolManager:
log.warning(f"{spend.puzzle_hash.hex()} != {coin_record.coin.puzzle_hash.hex()}")
return Err.WRONG_PUZZLE_HASH, None, []
chialisp_height = (
self.peak.prev_transaction_block_height if not self.peak.is_transaction_block else self.peak.height
)
# the height and time we pass in here represent the previous transaction
# block's height and timestamp. In the mempool, the most recent peak
# block we've received will be the previous transaction block, from the
# point-of-view of the next block to be farmed. Therefore we pass in the
# current peak's height and timestamp
assert self.peak.timestamp is not None
tl_error: Optional[Err] = mempool_check_time_locks(
removal_record_dict,
npc_result.conds,
uint32(chialisp_height),
self.peak.height,
self.peak.timestamp,
)
assert_height: Optional[uint32] = None
if tl_error:
assert_height = compute_assert_height(removal_record_dict, npc_result.conds)
timelocks: TimelockConditions = compute_assert_height(removal_record_dict, npc_result.conds)
potential = MempoolItem(new_spend, uint64(fees), npc_result, spend_name, first_added_height, assert_height)
potential = MempoolItem(
new_spend,
uint64(fees),
npc_result,
spend_name,
first_added_height,
timelocks.assert_height,
timelocks.assert_before_height,
timelocks.assert_before_seconds,
)
if tl_error:
if tl_error is Err.ASSERT_HEIGHT_ABSOLUTE_FAILED or tl_error is Err.ASSERT_HEIGHT_RELATIVE_FAILED:
@ -528,8 +565,7 @@ class MempoolManager:
return Err.DOUBLE_SPEND, set()
# 2. Checks if there's a mempool conflict
items: List[MempoolItem] = self.mempool.get_spends_by_coin_id(removal.name())
for item in items:
conflicts.add(item)
conflicts.update(items)
if len(conflicts) > 0:
return Err.MEMPOOL_CONFLICT, conflicts
@ -567,6 +603,7 @@ class MempoolManager:
"""
if new_peak is None:
return []
# we're only interested in transaction blocks
if new_peak.is_transaction_block is False:
return []
if self.peak == new_peak:
@ -575,6 +612,8 @@ class MempoolManager:
self.fee_estimator.new_block_height(new_peak.height)
included_items: List[MempoolItemInfo] = []
self.mempool.new_tx_block(new_peak.height, new_peak.timestamp)
use_optimization: bool = self.peak is not None and new_peak.prev_transaction_block_hash == self.peak.header_hash
self.peak = new_peak
@ -644,6 +683,17 @@ class MempoolManager:
return items
T = TypeVar("T", uint32, uint64)
def optional_min(a: Optional[T], b: Optional[T]) -> Optional[T]:
return min((v for v in [a, b] if v is not None), default=None)
def optional_max(a: Optional[T], b: Optional[T]) -> Optional[T]:
return max((v for v in [a, b] if v is not None), default=None)
def can_replace(
conflicting_items: Set[MempoolItem],
removal_names: Set[bytes32],
@ -659,6 +709,9 @@ def can_replace(
conflicting_fees = 0
conflicting_cost = 0
assert_height: Optional[uint32] = None
assert_before_height: Optional[uint32] = None
assert_before_seconds: Optional[uint64] = None
for item in conflicting_items:
conflicting_fees += item.fee
conflicting_cost += item.cost
@ -673,6 +726,10 @@ def can_replace(
log.debug(f"Rejecting conflicting tx as it does not spend conflicting coin {coin.name()}")
return False
assert_height = optional_max(assert_height, item.assert_height)
assert_before_height = optional_min(assert_before_height, item.assert_before_height)
assert_before_seconds = optional_min(assert_before_seconds, item.assert_before_seconds)
# New item must have higher fee per cost
conflicting_fees_per_cost = conflicting_fees / conflicting_cost
if new_item.fee_per_cost <= conflicting_fees_per_cost:
@ -688,5 +745,30 @@ def can_replace(
log.debug(f"Rejecting conflicting tx due to low fee increase ({fee_increase})")
return False
# New item may not have a different effective height/time lock (time-lock rule)
if new_item.assert_height != assert_height:
log.debug(
"Rejecting conflicting tx due to changing ASSERT_HEIGHT constraints %s -> %s",
assert_height,
new_item.assert_height,
)
return False
if new_item.assert_before_height != assert_before_height:
log.debug(
"Rejecting conflicting tx due to changing ASSERT_BEFORE_HEIGHT constraints %s -> %s",
assert_before_height,
new_item.assert_before_height,
)
return False
if new_item.assert_before_seconds != assert_before_seconds:
log.debug(
"Rejecting conflicting tx due to changing ASSERT_BEFORE_SECONDS constraints %s -> %s",
assert_before_seconds,
new_item.assert_before_seconds,
)
return False
log.info(f"Replacing conflicting tx in mempool. New tx fee: {new_item.fee}, old tx fees: {conflicting_fees}")
return True

View File

@ -45,7 +45,7 @@ from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.coin_record import CoinRecord
from chia.types.coin_spend import CoinSpend, compute_additions
from chia.types.spend_bundle import SpendBundle
from chia.util.ints import uint8, uint32, uint64, uint128
from chia.util.ints import uint32, uint64, uint128
from chia.wallet.derive_keys import find_owner_sk
from chia.wallet.sign_coin_spends import sign_coin_spends
from chia.wallet.transaction_record import TransactionRecord
@ -119,8 +119,8 @@ class PoolWallet:
"""
@classmethod
def type(cls) -> uint8:
return uint8(WalletType.POOLING_WALLET)
def type(cls) -> WalletType:
return WalletType.POOLING_WALLET
def id(self) -> uint32:
return self.wallet_info.id
@ -360,7 +360,7 @@ class PoolWallet:
await pool_wallet.update_pool_config()
p2_puzzle_hash: bytes32 = (await pool_wallet.get_current_state()).p2_singleton_puzzle_hash
await wallet_state_manager.add_new_wallet(pool_wallet, pool_wallet.wallet_id, create_puzzle_hashes=False)
await wallet_state_manager.add_new_wallet(pool_wallet)
await wallet_state_manager.add_interested_puzzle_hashes([p2_puzzle_hash], [pool_wallet.wallet_id])
return pool_wallet

View File

@ -30,7 +30,7 @@ from chia.util.bech32m import decode_puzzle_hash, encode_puzzle_hash
from chia.util.byte_types import hexstr_to_bytes
from chia.util.config import load_config
from chia.util.errors import KeychainIsLocked
from chia.util.ints import uint8, uint16, uint32, uint64
from chia.util.ints import uint16, uint32, uint64
from chia.util.keychain import bytes_to_mnemonic, generate_mnemonic
from chia.util.path import path_from_root
from chia.util.ws_message import WsRpcMessage, create_payload_dict
@ -2075,7 +2075,7 @@ class WalletRpcApi:
pass
wallet_id = did_wallet.id()
wallet_type = WalletType(did_wallet.type())
wallet_type = did_wallet.type()
assert coin_state.created_height is not None
coin_record: WalletCoinRecord = WalletCoinRecord(
coin_state.coin, uint32(coin_state.created_height), uint32(0), False, False, wallet_type, wallet_id
@ -2589,7 +2589,7 @@ class WalletRpcApi:
did_wallets_by_did_id: Dict[bytes32, uint32] = {}
for wallet in all_wallets:
if wallet.type() == uint8(WalletType.DECENTRALIZED_ID):
if wallet.type() == WalletType.DECENTRALIZED_ID:
assert isinstance(wallet, DIDWallet)
if wallet.did_info.origin_coin is not None:
did_wallets_by_did_id[wallet.did_info.origin_coin.name()] = wallet.id()

View File

@ -8,12 +8,12 @@ import traceback
from logging import Logger
from random import Random
from secrets import randbits
from typing import Any, Dict, List, Optional, Set, Tuple, Union
from typing import Any, Dict, List, Optional, Set, Tuple
import dns.asyncresolver
from chia.protocols.full_node_protocol import RequestPeers, RespondPeers
from chia.protocols.introducer_protocol import RequestPeersIntroducer, RespondPeersIntroducer
from chia.protocols.introducer_protocol import RequestPeersIntroducer
from chia.protocols.protocol_message_types import ProtocolMessageTypes
from chia.server.address_manager import AddressManager, ExtendedPeerInfo
from chia.server.address_manager_sqlite_store import create_address_manager_from_db
@ -237,7 +237,7 @@ class FullNodeDiscovery:
)
self.log.info(f"Received {len(peers)} peers from DNS seeder, using rdtype = {rdtype}.")
if len(peers) > 0:
await self._respond_peers_common(RespondPeers(peers), None, False)
await self._add_peers_common(peers, None, False)
except Exception as e:
self.log.warning(f"querying DNS introducer failed: {e}")
@ -467,13 +467,13 @@ class FullNodeDiscovery:
async with self.address_manager.lock:
self.address_manager.cleanup(max_timestamp_difference, max_consecutive_failures)
async def _respond_peers_common(
self, request: Union[RespondPeers, RespondPeersIntroducer], peer_src: Optional[PeerInfo], is_full_node: bool
async def _add_peers_common(
self, peer_list: List[TimestampedPeerInfo], peer_src: Optional[PeerInfo], is_full_node: bool
) -> None:
# Check if we got the peers from a full node or from the introducer.
peers_adjusted_timestamp = []
is_misbehaving = False
if len(request.peer_list) > MAX_PEERS_RECEIVED_PER_REQUEST:
if len(peer_list) > MAX_PEERS_RECEIVED_PER_REQUEST:
is_misbehaving = True
if is_full_node:
if peer_src is None:
@ -481,12 +481,12 @@ class FullNodeDiscovery:
async with self.lock:
if peer_src.host not in self.received_count_from_peers:
self.received_count_from_peers[peer_src.host] = 0
self.received_count_from_peers[peer_src.host] += len(request.peer_list)
self.received_count_from_peers[peer_src.host] += len(peer_list)
if self.received_count_from_peers[peer_src.host] > MAX_TOTAL_PEERS_RECEIVED:
is_misbehaving = True
if is_misbehaving:
return None
for peer in request.peer_list:
for peer in peer_list:
if peer.timestamp < 100000000 or peer.timestamp > time.time() + 10 * 60:
# Invalid timestamp, predefine a bad one.
current_peer = TimestampedPeerInfo(
@ -621,17 +621,17 @@ class FullNodePeers(FullNodeDiscovery):
self.log.error(f"Request peers exception: {e}")
return None
async def respond_peers(
self, request: Union[RespondPeers, RespondPeersIntroducer], peer_src: Optional[PeerInfo], is_full_node: bool
async def add_peers(
self, peer_list: List[TimestampedPeerInfo], peer_src: Optional[PeerInfo], is_full_node: bool
) -> None:
try:
await self._respond_peers_common(request, peer_src, is_full_node)
await self._add_peers_common(peer_list, peer_src, is_full_node)
if is_full_node:
if peer_src is None:
return
await self.add_peers_neighbour(request.peer_list, peer_src)
if len(request.peer_list) == 1 and self.relay_queue is not None:
peer = request.peer_list[0]
await self.add_peers_neighbour(peer_list, peer_src)
if len(peer_list) == 1 and self.relay_queue is not None:
peer = peer_list[0]
if peer.timestamp > time.time() - 60 * 10:
self.relay_queue.put_nowait((peer, 2))
except Exception as e:
@ -731,7 +731,7 @@ class WalletPeers(FullNodeDiscovery):
return None
await self._close_common()
async def respond_peers(
self, request: Union[RespondPeers, RespondPeersIntroducer], peer_src: Optional[PeerInfo], is_full_node: bool
async def add_peers(
self, peer_list: List[TimestampedPeerInfo], peer_src: Optional[PeerInfo], is_full_node: bool
) -> None:
await self._respond_peers_common(request, peer_src, is_full_node)
await self._add_peers_common(peer_list, peer_src, is_full_node)

View File

@ -52,13 +52,11 @@ def create_wallet_service(
fnp = service_config.get("full_node_peer")
if fnp:
node.full_node_peer = PeerInfo(
str(get_host_addr(fnp["host"], prefer_ipv6=config.get("prefer_ipv6", False))), fnp["port"]
)
connect_peers = [node.full_node_peer]
connect_peers = [
PeerInfo(str(get_host_addr(fnp["host"], prefer_ipv6=config.get("prefer_ipv6", False))), fnp["port"])
]
else:
connect_peers = []
node.full_node_peer = None
network_id = service_config["selected_network"]
rpc_port = service_config.get("rpc_port")
rpc_info: Optional[RpcInfo] = None

View File

@ -190,7 +190,7 @@ class FullNodeSimulator(FullNodeAPI):
[genesis], {}, validate_signatures=True
)
assert pre_validation_results is not None
await self.full_node.blockchain.receive_block(genesis, pre_validation_results[0])
await self.full_node.blockchain.add_block(genesis, pre_validation_results[0])
peak = self.full_node.blockchain.get_peak()
assert peak is not None
@ -239,7 +239,7 @@ class FullNodeSimulator(FullNodeAPI):
[genesis], {}, validate_signatures=True
)
assert pre_validation_results is not None
await self.full_node.blockchain.receive_block(genesis, pre_validation_results[0])
await self.full_node.blockchain.add_block(genesis, pre_validation_results[0])
peak = self.full_node.blockchain.get_peak()
assert peak is not None

View File

@ -15,7 +15,7 @@ from chia.util.hash import std_hash
from .tree_hash import sha256_treehash
INFINITE_COST = 0x7FFFFFFFFFFFFFFF
INFINITE_COST = 11000000000
class Program(SExp):

View File

@ -35,6 +35,7 @@ class ConditionOpcode(bytes, enum.Enum):
ASSERT_MY_AMOUNT = bytes([73])
ASSERT_MY_BIRTH_SECONDS = bytes([74])
ASSERT_MY_BIRTH_HEIGHT = bytes([75])
ASSERT_EPHEMERAL = bytes([76])
# the conditions below ensure that we're "far enough" in the future

View File

@ -1,6 +1,6 @@
from __future__ import annotations
from dataclasses import dataclass
from dataclasses import dataclass, field
from typing import List
from chia.types.blockchain_format.serialized_program import SerializedProgram
@ -20,7 +20,7 @@ class CompressorArg:
"""`CompressorArg` is used as input to the Block Compressor"""
block_height: uint32
generator: SerializedProgram
generator: SerializedProgram = field(repr=False)
start: int
end: int

View File

@ -23,6 +23,11 @@ class MempoolItem:
# If present, this SpendBundle is not valid at or before this height
assert_height: Optional[uint32] = None
# If present, this SpendBundle is not valid once the block height reaches
# the specified height
assert_before_height: Optional[uint32] = None
assert_before_seconds: Optional[uint64] = None
def __lt__(self, other: MempoolItem) -> bool:
    """Order mempool items by fee-per-cost, ascending."""
    mine = self.fee_per_cost
    theirs = other.fee_per_cost
    return mine < theirs

View File

@ -173,6 +173,9 @@ class Err(Enum):
ASSERT_MY_BIRTH_SECONDS_FAILED = 138
ASSERT_MY_BIRTH_HEIGHT_FAILED = 139
ASSERT_EPHEMERAL_FAILED = 140
EPHEMERAL_RELATIVE_CONDITION = 141
class ValidationError(Exception):
def __init__(self, code: Err, error_msg: str = ""):

View File

@ -35,7 +35,7 @@ class CATOuterPuzzle:
_, tail_hash, inner_puzzle = args
constructor_dict = {
"type": "CAT",
"tail": "0x" + tail_hash.as_python().hex(),
"tail": "0x" + tail_hash.atom.hex(),
}
next_constructor = self._match(uncurry_puzzle(inner_puzzle))
if next_constructor is not None:

View File

@ -24,7 +24,7 @@ from chia.types.spend_bundle import SpendBundle
from chia.util.byte_types import hexstr_to_bytes
from chia.util.condition_tools import conditions_dict_for_solution, pkm_pairs_for_conditions_dict
from chia.util.hash import std_hash
from chia.util.ints import uint8, uint32, uint64, uint128
from chia.util.ints import uint32, uint64, uint128
from chia.wallet.cat_wallet.cat_constants import DEFAULT_CATS
from chia.wallet.cat_wallet.cat_info import CATInfo, LegacyCATInfo
from chia.wallet.cat_wallet.cat_utils import (
@ -125,7 +125,7 @@ class CATWallet:
await wallet_state_manager.user_store.delete_wallet(self.id())
raise ValueError("Failed to create spend.")
await self.wallet_state_manager.add_new_wallet(self, self.id())
await self.wallet_state_manager.add_new_wallet(self)
# If the new CAT name wasn't originally provided, we used a temporary name before issuance
# since we didn't yet know the TAIL. Now we know the TAIL, we can update the name
@ -207,7 +207,7 @@ class CATWallet:
self.wallet_info = await wallet_state_manager.user_store.create_wallet(name, WalletType.CAT, info_as_string)
self.lineage_store = await CATLineageStore.create(self.wallet_state_manager.db_wrapper, self.get_asset_id())
await self.wallet_state_manager.add_new_wallet(self, self.id())
await self.wallet_state_manager.add_new_wallet(self)
return self
@classmethod
@ -254,8 +254,8 @@ class CATWallet:
return self
@classmethod
def type(cls) -> uint8:
return uint8(WalletType.CAT)
def type(cls) -> WalletType:
return WalletType.CAT
def id(self) -> uint32:
return self.wallet_info.id
@ -289,11 +289,11 @@ class CATWallet:
assert txs[0].spend_bundle
program: BlockGenerator = simple_solution_generator(txs[0].spend_bundle)
# npc contains names of the coins removed, puzzle_hashes and their spend conditions
# we use height=0 here to not enable any soft-fork semantics. It
# will only matter once the wallet generates transactions relying on
# new conditions, and we can change this by then
result: NPCResult = get_name_puzzle_conditions(
program,
self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM,
cost_per_byte=self.wallet_state_manager.constants.COST_PER_BYTE,
mempool_mode=True,
program, self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM, mempool_mode=True, height=uint32(0)
)
self.cost_of_single_tx = result.cost
self.log.info(f"Cost of a single tx for CAT wallet: {self.cost_of_single_tx}")

View File

@ -22,7 +22,7 @@ from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.coin_spend import CoinSpend
from chia.types.spend_bundle import SpendBundle
from chia.util.condition_tools import conditions_dict_for_solution, pkm_pairs_for_conditions_dict
from chia.util.ints import uint8, uint32, uint64, uint128
from chia.util.ints import uint32, uint64, uint128
from chia.wallet.coin_selection import select_coins
from chia.wallet.derivation_record import DerivationRecord
from chia.wallet.derive_keys import master_sk_to_wallet_sk_unhardened
@ -122,7 +122,7 @@ class DIDWallet:
if spend_bundle is None:
await wallet_state_manager.user_store.delete_wallet(self.id())
raise ValueError("Failed to create spend.")
await self.wallet_state_manager.add_new_wallet(self, self.wallet_info.id)
await self.wallet_state_manager.add_new_wallet(self)
return self
@ -157,7 +157,7 @@ class DIDWallet:
self.wallet_info = await wallet_state_manager.user_store.create_wallet(
name, WalletType.DECENTRALIZED_ID.value, info_as_string
)
await self.wallet_state_manager.add_new_wallet(self, self.wallet_info.id)
await self.wallet_state_manager.add_new_wallet(self)
await self.save_info(self.did_info)
await self.wallet_state_manager.update_wallet_puzzle_hashes(self.wallet_info.id)
await self.load_parent(self.did_info)
@ -232,7 +232,7 @@ class DIDWallet:
self.wallet_info = await wallet_state_manager.user_store.create_wallet(
name, WalletType.DECENTRALIZED_ID.value, info_as_string
)
await self.wallet_state_manager.add_new_wallet(self, self.wallet_info.id)
await self.wallet_state_manager.add_new_wallet(self)
await self.wallet_state_manager.update_wallet_puzzle_hashes(self.wallet_info.id)
await self.load_parent(self.did_info)
self.log.info(f"New DID wallet created {info_as_string}.")
@ -269,8 +269,8 @@ class DIDWallet:
return self
@classmethod
def type(cls) -> uint8:
return uint8(WalletType.DECENTRALIZED_ID)
def type(cls) -> WalletType:
return WalletType.DECENTRALIZED_ID
def id(self) -> uint32:
return self.wallet_info.id

View File

@ -3,8 +3,6 @@ from __future__ import annotations
from dataclasses import dataclass
from typing import Callable, List, Optional, Tuple
from clvm_tools.binutils import disassemble
from chia.types.blockchain_format.coin import Coin
from chia.types.blockchain_format.program import Program
from chia.types.blockchain_format.sized_bytes import bytes32
@ -45,8 +43,8 @@ class MetadataOuterPuzzle:
_, metadata, updater_hash, inner_puzzle = curried_args
constructor_dict = {
"type": "metadata",
"metadata": disassemble(metadata),
"updater_hash": "0x" + updater_hash.as_python().hex(),
"metadata": metadata,
"updater_hash": "0x" + updater_hash.as_atom().hex(),
}
next_constructor = self._match(uncurry_puzzle(inner_puzzle))
if next_constructor is not None:

View File

@ -21,7 +21,7 @@ from chia.types.coin_spend import CoinSpend, compute_additions
from chia.types.spend_bundle import SpendBundle
from chia.util.condition_tools import conditions_dict_for_solution, pkm_pairs_for_conditions_dict
from chia.util.hash import std_hash
from chia.util.ints import uint8, uint16, uint32, uint64, uint128
from chia.util.ints import uint16, uint32, uint64, uint128
from chia.wallet.derivation_record import DerivationRecord
from chia.wallet.did_wallet import did_wallet_puzzles
from chia.wallet.did_wallet.did_info import DIDInfo
@ -49,7 +49,6 @@ from chia.wallet.trading.offer import (
from chia.wallet.transaction_record import TransactionRecord
from chia.wallet.uncurried_puzzle import uncurry_puzzle
from chia.wallet.util.compute_memos import compute_memos
from chia.wallet.util.debug_spend_bundle import disassemble
from chia.wallet.util.transaction_type import TransactionType
from chia.wallet.util.wallet_types import AmountWithPuzzlehash, WalletType
from chia.wallet.wallet import CHIP_0002_SIGN_MESSAGE_PREFIX, Wallet
@ -99,7 +98,7 @@ class NFTWallet:
self.nft_store = wallet_state_manager.nft_store
self.log.debug("NFT wallet id: %r and standard wallet id: %r", self.wallet_id, self.standard_wallet.wallet_id)
await self.wallet_state_manager.add_new_wallet(self, self.wallet_info.id)
await self.wallet_state_manager.add_new_wallet(self)
self.log.debug("Generated a new NFT wallet: %s", self.__dict__)
return self
@ -123,8 +122,8 @@ class NFTWallet:
return self
@classmethod
def type(cls) -> uint8:
return uint8(WalletType.NFT)
def type(cls) -> WalletType:
return WalletType.NFT
def id(self) -> uint32:
return self.wallet_info.id
@ -373,7 +372,7 @@ class NFTWallet:
inner_puzzle = create_ownership_layer_puzzle(
launcher_coin.name(), b"", p2_inner_puzzle, percentage, royalty_puzzle_hash=royalty_puzzle_hash
)
self.log.debug("Got back ownership inner puzzle: %s", disassemble(inner_puzzle))
self.log.debug("Got back ownership inner puzzle: %s", inner_puzzle)
else:
self.log.debug("Creating standard NFT")
inner_puzzle = p2_inner_puzzle
@ -1304,7 +1303,7 @@ class NFTWallet:
"""
# get DID Wallet
for wallet in self.wallet_state_manager.wallets.values():
if wallet.type() == WalletType.DECENTRALIZED_ID.value:
if wallet.type() == WalletType.DECENTRALIZED_ID:
if self.get_did() == bytes32.from_hexstr(wallet.get_my_DID()):
did_wallet = wallet
break

View File

@ -31,10 +31,13 @@ class SingletonOuterPuzzle:
matched, curried_args = match_singleton_puzzle(puzzle)
if matched:
singleton_struct, inner_puzzle = curried_args
launcher_struct = singleton_struct.pair[1].pair
launcher_id = launcher_struct[0].atom
launcher_ph = launcher_struct[1].atom
constructor_dict = {
"type": "singleton",
"launcher_id": "0x" + singleton_struct.as_python()[1].hex(),
"launcher_ph": "0x" + singleton_struct.as_python()[2].hex(),
"launcher_id": "0x" + launcher_id.hex(),
"launcher_ph": "0x" + launcher_ph.hex(),
}
next_constructor = self._match(uncurry_puzzle(inner_puzzle))
if next_constructor is not None:

View File

@ -99,6 +99,8 @@ def decode_info_value(cls: Any, value: Any) -> Any:
return cls(value)
elif isinstance(value, list):
return [decode_info_value(cls, v) for v in value]
elif isinstance(value, Program) and value.atom is None:
return value
else:
if value == "()": # special case
return Program.to([])

View File

@ -19,12 +19,20 @@
(defconstant CREATE_PUZZLE_ANNOUNCEMENT 62)
(defconstant ASSERT_PUZZLE_ANNOUNCEMENT 63)
; coin-id
(defconstant ASSERT_CONCURRENT_SPEND 64)
; puzzle-hash
(defconstant ASSERT_CONCURRENT_PUZZLE 65)
; the conditions below let coins inquire about themselves
(defconstant ASSERT_MY_COIN_ID 70)
(defconstant ASSERT_MY_PARENT_ID 71)
(defconstant ASSERT_MY_PUZZLEHASH 72)
(defconstant ASSERT_MY_AMOUNT 73)
(defconstant ASSERT_MY_BIRTH_SECONDS 74)
(defconstant ASSERT_MY_BIRTH_HEIGHT 75)
(defconstant ASSERT_EPHEMERAL 76)
; the conditions below ensure that we're "far enough" in the future
@ -36,6 +44,16 @@
(defconstant ASSERT_HEIGHT_RELATIVE 82)
(defconstant ASSERT_HEIGHT_ABSOLUTE 83)
; the conditions below ensure that we're "not too far" in the future
; wall-clock time
(defconstant ASSERT_BEFORE_SECONDS_RELATIVE 84)
(defconstant ASSERT_BEFORE_SECONDS_ABSOLUTE 85)
; block index
(defconstant ASSERT_BEFORE_HEIGHT_RELATIVE 86)
(defconstant ASSERT_BEFORE_HEIGHT_ABSOLUTE 87)
; A condition that is always true and always ignore all arguments
(defconstant REMARK 1)
)

View File

@ -3,7 +3,7 @@ from __future__ import annotations
import dataclasses
import logging
import time
from typing import Any, Dict, List, Optional, Set, Tuple, Union
from typing import Any, Dict, List, Optional, Set, Tuple, Union, cast
from typing_extensions import Literal
@ -137,10 +137,9 @@ class TradeManager:
# Then let's filter the offer into coins that WE offered
offer = Offer.from_bytes(trade.offer)
primary_coin_ids = [c.name() for c in offer.removals()]
our_coin_records: List[WalletCoinRecord] = await self.wallet_state_manager.coin_store.get_multiple_coin_records(
primary_coin_ids
)
our_primary_coins: List[Coin] = [cr.coin for cr in our_coin_records]
# TODO: Add `WalletCoinStore.get_coins`.
our_coin_records = await self.wallet_state_manager.coin_store.get_coin_records(primary_coin_ids)
our_primary_coins: List[Coin] = [cr.coin for cr in our_coin_records.values()]
our_additions: List[Coin] = list(
filter(lambda c: offer.get_root_removal(c) in our_primary_coins, offer.additions())
)
@ -176,7 +175,7 @@ class TradeManager:
await self.trade_store.set_status(trade.trade_id, TradeStatus.FAILED)
self.log.warning(f"Trade with id: {trade.trade_id} failed")
async def get_locked_coins(self, wallet_id: Optional[int] = None) -> Dict[bytes32, WalletCoinRecord]:
async def get_locked_coins(self) -> Dict[bytes32, WalletCoinRecord]:
"""Returns a dictionary of confirmed coins that are locked by a trade."""
all_pending = []
pending_accept = await self.get_offers_with_status(TradeStatus.PENDING_ACCEPT)
@ -190,13 +189,13 @@ class TradeManager:
for trade_offer in all_pending:
coins_of_interest.extend([c.name() for c in trade_offer.coins_of_interest])
result = {}
coin_records = await self.wallet_state_manager.coin_store.get_multiple_coin_records(coins_of_interest)
for record in coin_records:
if wallet_id is None or record.wallet_id == wallet_id:
result[record.name()] = record
return result
# TODO:
# - No need to get the coin records here, we are only interested in the coin_id on the call site.
# - The cast here is required for now because TradeManager.wallet_state_manager is hinted as Any.
return cast(
Dict[bytes32, WalletCoinRecord],
await self.wallet_state_manager.coin_store.get_coin_records(coins_of_interest),
)
async def get_all_trades(self) -> List[TradeRecord]:
all: List[TradeRecord] = await self.trade_store.get_all_trades()

View File

@ -78,6 +78,8 @@ class Offer:
# this is a cache of the coin additions made by the SpendBundle (_bundle)
# ordered by the coin being spent
_additions: Dict[Coin, List[Coin]] = field(init=False)
_offered_coins: Dict[Optional[bytes32], List[Coin]] = field(init=False)
_final_spend_bundle: Optional[SpendBundle] = field(init=False)
@staticmethod
def ph() -> bytes32:
@ -173,7 +175,7 @@ class Offer:
# This method does not get every coin that is being offered, only the `settlement_payment` children
# It's also a little heuristic, but it should get most things
def get_offered_coins(self) -> Dict[Optional[bytes32], List[Coin]]:
def _get_offered_coins(self) -> Dict[Optional[bytes32], List[Coin]]:
offered_coins: Dict[Optional[bytes32], List[Coin]] = {}
for parent_spend in self._bundle.coin_spends:
@ -239,9 +241,16 @@ class Offer:
if coins_for_this_spend != []:
offered_coins.setdefault(asset_id, [])
offered_coins[asset_id].extend(coins_for_this_spend)
return offered_coins
def get_offered_coins(self) -> Dict[Optional[bytes32], List[Coin]]:
    """Memoized wrapper around _get_offered_coins().

    The instance is a frozen dataclass, so the cache attribute has to be
    written with object.__setattr__; it is unset until the first call.
    """
    if not hasattr(self, "_offered_coins"):
        object.__setattr__(self, "_offered_coins", self._get_offered_coins())
    return self._offered_coins
def get_offered_amounts(self) -> Dict[Optional[bytes32], int]:
offered_coins: Dict[Optional[bytes32], List[Coin]] = self.get_offered_coins()
offered_amounts: Dict[Optional[bytes32], int] = {}
@ -535,7 +544,12 @@ class Offer:
return SpendBundle.aggregate([SpendBundle(completion_spends, G2Element()), self._bundle])
def to_spend_bundle(self) -> SpendBundle:
# Before we serialze this as a SpendBundle, we need to serialze the `requested_payments` as dummy CoinSpends
try:
if self._final_spend_bundle is not None:
return self._final_spend_bundle
except AttributeError:
pass
# Before we serialize this as a SpendBundle, we need to serialize the `requested_payments` as dummy CoinSpends
additional_coin_spends: List[CoinSpend] = []
for asset_id, payments in self.requested_payments.items():
puzzle_reveal: Program = construct_puzzle(self.driver_dict[asset_id], OFFER_MOD) if asset_id else OFFER_MOD
@ -557,12 +571,14 @@ class Offer:
)
)
return SpendBundle.aggregate(
sb = SpendBundle.aggregate(
[
SpendBundle(additional_coin_spends, G2Element()),
self._bundle,
]
)
object.__setattr__(self, "_final_spend_bundle", sb)
return sb
@classmethod
def from_spend_bundle(cls, bundle: SpendBundle) -> Offer:
@ -600,6 +616,11 @@ class Offer:
def name(self) -> bytes32:
return self.to_spend_bundle().name()
def __eq__(self, other: object) -> bool:
    """Two offers are equal iff their spend-bundle names (hashes) match."""
    if isinstance(other, Offer):
        return self.name() == other.name()
    # Unrelated types never compare equal.
    return False
def compress(self, version: Optional[int] = None) -> bytes:
as_spend_bundle = self.to_spend_bundle()
if version is None:

View File

@ -64,7 +64,7 @@ def compress_with_zdict(blob: bytes, zdict: bytes) -> bytes:
def decompress_with_zdict(blob: bytes, zdict: bytes) -> bytes:
do = zlib.decompressobj(zdict=zdict)
return do.decompress(blob)
return do.decompress(blob, max_length=6 * 1024 * 1024) # Limit output size
def decompress_object_with_puzzles(compressed_object_blob: bytes) -> bytes:

View File

@ -18,7 +18,7 @@ from chia.types.coin_spend import CoinSpend
from chia.types.generator_types import BlockGenerator
from chia.types.spend_bundle import SpendBundle
from chia.util.hash import std_hash
from chia.util.ints import uint8, uint32, uint64, uint128
from chia.util.ints import uint32, uint64, uint128
from chia.wallet.coin_selection import select_coins
from chia.wallet.derivation_record import DerivationRecord
from chia.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle import (
@ -55,6 +55,7 @@ CHIP_0002_SIGN_MESSAGE_PREFIX = "Chia Signed Message"
class Wallet:
wallet_info: WalletInfo
wallet_state_manager: Any
log: logging.Logger
wallet_id: uint32
@ -90,11 +91,11 @@ class Wallet:
assert tx.spend_bundle is not None
program: BlockGenerator = simple_solution_generator(tx.spend_bundle)
# npc contains names of the coins removed, puzzle_hashes and their spend conditions
# we use height=0 here to not enable any soft-fork semantics. It
# will only matter once the wallet generates transactions relying on
# new conditions, and we can change this by then
result: NPCResult = get_name_puzzle_conditions(
program,
self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM,
cost_per_byte=self.wallet_state_manager.constants.COST_PER_BYTE,
mempool_mode=True,
program, self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM, mempool_mode=True, height=uint32(0)
)
self.cost_of_single_tx = result.cost
self.log.info(f"Cost of a single tx for standard wallet: {self.cost_of_single_tx}")
@ -113,8 +114,8 @@ class Wallet:
return uint128(total_amount)
@classmethod
def type(cls) -> uint8:
return uint8(WalletType.STANDARD_WALLET)
def type(cls) -> WalletType:
return WalletType.STANDARD_WALLET
def id(self) -> uint32:
return self.wallet_id

View File

@ -5,7 +5,7 @@ from typing import Dict, List, Optional, Tuple
from chia.consensus.block_header_validation import validate_finished_header_block
from chia.consensus.block_record import BlockRecord
from chia.consensus.blockchain import ReceiveBlockResult
from chia.consensus.blockchain import AddBlockResult
from chia.consensus.blockchain_interface import BlockchainInterface
from chia.consensus.constants import ConsensusConstants
from chia.consensus.find_fork_point import find_fork_point_in_chain
@ -89,11 +89,11 @@ class WalletBlockchain(BlockchainInterface):
await self.set_peak_block(weight_proof.recent_chain_data[-1], latest_timestamp)
await self.clean_block_records()
async def receive_block(self, block: HeaderBlock) -> Tuple[ReceiveBlockResult, Optional[Err]]:
async def receive_block(self, block: HeaderBlock) -> Tuple[AddBlockResult, Optional[Err]]:
if self.contains_block(block.header_hash):
return ReceiveBlockResult.ALREADY_HAVE_BLOCK, None
return AddBlockResult.ALREADY_HAVE_BLOCK, None
if not self.contains_block(block.prev_header_hash) and block.height > 0:
return ReceiveBlockResult.DISCONNECTED_BLOCK, None
return AddBlockResult.DISCONNECTED_BLOCK, None
if (
len(block.finished_sub_slots) > 0
and block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters is not None
@ -110,9 +110,9 @@ class WalletBlockchain(BlockchainInterface):
self.constants, self, block, False, difficulty, sub_slot_iters, False
)
if error is not None:
return ReceiveBlockResult.INVALID_BLOCK, error.code
return AddBlockResult.INVALID_BLOCK, error.code
if required_iters is None:
return ReceiveBlockResult.INVALID_BLOCK, Err.INVALID_POSPACE
return AddBlockResult.INVALID_BLOCK, Err.INVALID_POSPACE
# We are passing in sub_slot_iters here so we don't need to backtrack until the start of the epoch to find
# the sub slot iters and difficulty. This allows us to keep the cache small.
@ -127,7 +127,7 @@ class WalletBlockchain(BlockchainInterface):
latest_timestamp = None
self._height_to_hash[block_record.height] = block_record.header_hash
await self.set_peak_block(block, latest_timestamp)
return ReceiveBlockResult.NEW_PEAK, None
return AddBlockResult.NEW_PEAK, None
elif block_record.weight > self._peak.weight:
if block_record.prev_hash == self._peak.header_hash:
fork_height: int = self._peak.height
@ -147,8 +147,8 @@ class WalletBlockchain(BlockchainInterface):
self._difficulty = uint64(block_record.weight - self.block_record(block_record.prev_hash).weight)
await self.set_peak_block(block, latest_timestamp)
await self.clean_block_records()
return ReceiveBlockResult.NEW_PEAK, None
return ReceiveBlockResult.ADDED_AS_ORPHAN, None
return AddBlockResult.NEW_PEAK, None
return AddBlockResult.ADDED_AS_ORPHAN, None
async def _rollback_to_height(self, height: int) -> None:
if self._peak is None:

View File

@ -64,19 +64,6 @@ class WalletCoinStore:
)
return int(0 if row is None else row[0])
async def get_multiple_coin_records(self, coin_names: List[bytes32]) -> List[WalletCoinRecord]:
"""Return WalletCoinRecord(s) that have a coin name in the specified list"""
if len(coin_names) == 0:
return []
as_hexes = [cn.hex() for cn in coin_names]
async with self.db_wrapper.reader_no_transaction() as conn:
rows = await conn.execute_fetchall(
f'SELECT * from coin_record WHERE coin_name in ({"?," * (len(as_hexes) - 1)}?)', tuple(as_hexes)
)
return [self.coin_record_from_row(row) for row in rows]
# Store CoinRecord in DB and ram cache
async def add_coin_record(self, record: WalletCoinRecord, name: Optional[bytes32] = None) -> None:
if name is None:

View File

@ -16,7 +16,7 @@ from blspy import AugSchemeMPL, G1Element, G2Element, PrivateKey
from packaging.version import Version
from chia.consensus.block_record import BlockRecord
from chia.consensus.blockchain import ReceiveBlockResult
from chia.consensus.blockchain import AddBlockResult
from chia.consensus.constants import ConsensusConstants
from chia.daemon.keychain_proxy import KeychainProxy, connect_to_keychain_and_validate, wrap_local_keychain
from chia.full_node.full_node_api import FullNodeAPI
@ -36,7 +36,6 @@ from chia.types.blockchain_format.sub_epoch_summary import SubEpochSummary
from chia.types.coin_spend import CoinSpend
from chia.types.header_block import HeaderBlock
from chia.types.mempool_inclusion_status import MempoolInclusionStatus
from chia.types.peer_info import PeerInfo
from chia.types.weight_proof import WeightProof
from chia.util.chunks import chunks
from chia.util.config import (
@ -95,29 +94,20 @@ class WalletNode:
log: logging.Logger = logging.getLogger(__name__)
# Normal operation data
cached_blocks: Dict = dataclasses.field(default_factory=dict)
future_block_hashes: Dict = dataclasses.field(default_factory=dict)
# Sync data
proof_hashes: List = dataclasses.field(default_factory=list)
state_changed_callback: Optional[StateChangedProtocol] = None
_wallet_state_manager: Optional[WalletStateManager] = None
_weight_proof_handler: Optional[WalletWeightProofHandler] = None
_server: Optional[ChiaServer] = None
wsm_close_task: Optional[asyncio.Task] = None
sync_task: Optional[asyncio.Task] = None
logged_in_fingerprint: Optional[int] = None
peer_task: Optional[asyncio.Task] = None
logged_in: bool = False
_keychain_proxy: Optional[KeychainProxy] = None
height_to_time: Dict[uint32, uint64] = dataclasses.field(default_factory=dict)
# Peers that we have long synced to
synced_peers: Set[bytes32] = dataclasses.field(default_factory=set)
wallet_peers: Optional[WalletPeers] = None
wallet_peers_initialized: bool = False
valid_wp_cache: Dict[bytes32, Any] = dataclasses.field(default_factory=dict)
untrusted_caches: Dict[bytes32, PeerRequestCache] = dataclasses.field(default_factory=dict)
peer_caches: Dict[bytes32, PeerRequestCache] = dataclasses.field(default_factory=dict)
# in Untrusted mode wallet might get the state update before receiving the block
race_cache: Dict[bytes32, Set[CoinState]] = dataclasses.field(default_factory=dict)
race_cache_hashes: List[Tuple[uint32, bytes32]] = dataclasses.field(default_factory=list)
@ -129,7 +119,6 @@ class WalletNode:
# Duration in seconds
wallet_tx_resend_timeout_secs: int = 1800
_new_peak_queue: Optional[NewPeakQueue] = None
full_node_peer: Optional[PeerInfo] = None
_shut_down: bool = False
_process_new_subscriptions_task: Optional[asyncio.Task] = None
@ -186,13 +175,13 @@ class WalletNode:
return self._keychain_proxy
def get_cache_for_peer(self, peer) -> PeerRequestCache:
if peer.peer_node_id not in self.untrusted_caches:
self.untrusted_caches[peer.peer_node_id] = PeerRequestCache()
return self.untrusted_caches[peer.peer_node_id]
if peer.peer_node_id not in self.peer_caches:
self.peer_caches[peer.peer_node_id] = PeerRequestCache()
return self.peer_caches[peer.peer_node_id]
def rollback_request_caches(self, reorg_height: int):
# Everything after reorg_height should be removed from the cache
for cache in self.untrusted_caches.values():
for cache in self.peer_caches.values():
cache.clear_after_height(reorg_height)
async def get_key_for_fingerprint(self, fingerprint: Optional[int]) -> Optional[PrivateKey]:
@ -388,7 +377,6 @@ class WalletNode:
index = await self.wallet_state_manager.puzzle_store.get_last_derivation_path()
if index is None or index < self.wallet_state_manager.initial_num_public_keys - 1:
await self.wallet_state_manager.create_more_puzzle_hashes(from_zero=True)
self.wsm_close_task = None
return True
def _close(self):
@ -503,7 +491,7 @@ class WalletNode:
async with self.wallet_state_manager.db_wrapper.writer():
self.log.info(f"retrying coin_state: {state}")
try:
await self.wallet_state_manager.new_coin_state(
await self.wallet_state_manager.add_coin_states(
[state], peer, None if fork_height == 0 else fork_height
)
except Exception as e:
@ -644,8 +632,8 @@ class WalletNode:
self.local_node_synced = False
self.initialize_wallet_peers()
if peer.peer_node_id in self.untrusted_caches:
self.untrusted_caches.pop(peer.peer_node_id)
if peer.peer_node_id in self.peer_caches:
self.peer_caches.pop(peer.peer_node_id)
if peer.peer_node_id in self.synced_peers:
self.synced_peers.remove(peer.peer_node_id)
if peer.peer_node_id in self.node_peaks:
@ -862,7 +850,7 @@ class WalletNode:
f"{inner_idx_start + len(inner_states) - 1}/ {len(items)})"
)
try:
await self.wallet_state_manager.new_coin_state(valid_states, peer, fork_height)
await self.wallet_state_manager.add_coin_states(valid_states, peer, fork_height)
except Exception as e:
tb = traceback.format_exc()
self.log.error(f"Exception while adding state: {e} {tb}")
@ -893,7 +881,7 @@ class WalletNode:
async with self.wallet_state_manager.db_wrapper.writer():
try:
self.log.info(f"new coin state received ({idx}-{idx + len(states) - 1}/ {len(items)})")
await self.wallet_state_manager.new_coin_state(states, peer, fork_height)
await self.wallet_state_manager.add_coin_states(states, peer, fork_height)
except Exception as e:
tb = traceback.format_exc()
self.log.error(f"Error adding states.. {e} {tb}")
@ -1009,17 +997,12 @@ class WalletNode:
Returns the timestamp for transaction block at h=height, if not transaction block, backtracks until it finds
a transaction block
"""
if height in self.height_to_time:
return self.height_to_time[height]
for cache in self.untrusted_caches.values():
for cache in self.peer_caches.values():
cache_ts: Optional[uint64] = cache.get_height_timestamp(height)
if cache_ts is not None:
return cache_ts
peers: List[WSChiaConnection] = self.get_full_node_peers_in_order()
last_tx_block: Optional[HeaderBlock] = None
for peer in peers:
for peer in self.get_full_node_peers_in_order():
last_tx_block = await fetch_last_tx_from_peer(height, peer)
if last_tx_block is None:
continue
@ -1236,7 +1219,7 @@ class WalletNode:
for block in blocks:
# Set blockchain to the latest peak
res, err = await self.wallet_state_manager.blockchain.receive_block(block)
if res == ReceiveBlockResult.INVALID_BLOCK:
if res == AddBlockResult.INVALID_BLOCK:
raise ValueError(err)
return fork_height

View File

@ -125,7 +125,7 @@ class WalletNodeAPI:
self, request: introducer_protocol.RespondPeersIntroducer, peer: WSChiaConnection
):
if self.wallet_node.wallet_peers is not None:
await self.wallet_node.wallet_peers.respond_peers(request, peer.get_peer_info(), False)
await self.wallet_node.wallet_peers.add_peers(request.peer_list, peer.get_peer_info(), False)
if peer is not None and peer.connection_type is NodeType.INTRODUCER:
await peer.close()
@ -136,7 +136,7 @@ class WalletNodeAPI:
return None
self.log.info(f"Wallet received {len(request.peer_list)} peers.")
await self.wallet_node.wallet_peers.respond_peers(request, peer.get_peer_info(), True)
await self.wallet_node.wallet_peers.add_peers(request.peer_list, peer.get_peer_info(), True)
return None

View File

@ -8,17 +8,18 @@ from typing_extensions import Protocol
from chia.server.ws_connection import WSChiaConnection
from chia.types.blockchain_format.coin import Coin
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.ints import uint8, uint32, uint64, uint128
from chia.util.ints import uint32, uint64, uint128
from chia.wallet.util.wallet_types import WalletType
from chia.wallet.wallet_coin_record import WalletCoinRecord
from chia.wallet.wallet_info import WalletInfo
if TYPE_CHECKING:
from chia.wallet.wallet_state_manager import WalletStateManager
class WalletProtocol(Protocol):
# TODO: it seems like this should return WalletType instead
@classmethod
def type(cls) -> uint8:
def type(cls) -> WalletType:
...
def id(self) -> uint32:
@ -63,6 +64,7 @@ class WalletProtocol(Protocol):
def get_name(self) -> str:
...
wallet_info: WalletInfo
# WalletStateManager is only imported for type hinting thus leaving pylint
# unable to process this
wallet_state_manager: WalletStateManager # pylint: disable=used-before-assignment

View File

@ -5,7 +5,6 @@ import json
import logging
import multiprocessing.context
import time
from collections import defaultdict
from contextlib import asynccontextmanager
from pathlib import Path
from secrets import token_bytes
@ -32,13 +31,12 @@ from chia.types.blockchain_format.program import Program
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.coin_record import CoinRecord
from chia.types.coin_spend import CoinSpend, compute_additions
from chia.types.full_block import FullBlock
from chia.types.mempool_inclusion_status import MempoolInclusionStatus
from chia.util.bech32m import encode_puzzle_hash
from chia.util.db_synchronous import db_synchronous_on
from chia.util.db_wrapper import DBWrapper2
from chia.util.errors import Err
from chia.util.ints import uint8, uint32, uint64, uint128
from chia.util.ints import uint32, uint64, uint128
from chia.util.lru_cache import LRUCache
from chia.util.path import path_from_root
from chia.wallet.cat_wallet.cat_constants import DEFAULT_CATS
@ -66,7 +64,6 @@ from chia.wallet.notification_manager import NotificationManager
from chia.wallet.outer_puzzles import AssetType
from chia.wallet.puzzle_drivers import PuzzleInfo
from chia.wallet.puzzles.cat_loader import CAT_MOD, CAT_MOD_HASH
from chia.wallet.settings.user_settings import UserSettings
from chia.wallet.singleton import create_fullpuz
from chia.wallet.trade_manager import TradeManager
from chia.wallet.trading.trade_status import TradeStatus
@ -103,8 +100,6 @@ class WalletStateManager:
nft_store: WalletNftStore
basic_store: KeyValStore
start_index: int
# Makes sure only one asyncio thread is changing the blockchain state at one time
lock: asyncio.Lock
@ -112,11 +107,9 @@ class WalletStateManager:
# TODO Don't allow user to send tx until wallet is synced
_sync_target: Optional[uint32]
genesis: FullBlock
state_changed_callback: Optional[StateChangedProtocol] = None
pending_tx_callback: Optional[Callable]
puzzle_hash_created_callbacks: Dict = defaultdict(lambda *x: None)
db_path: Path
db_wrapper: DBWrapper2
@ -126,8 +119,6 @@ class WalletStateManager:
trade_manager: TradeManager
notification_manager: NotificationManager
new_wallet: bool
user_settings: UserSettings
blockchain: WalletBlockchain
coin_store: WalletCoinStore
interested_store: WalletInterestedStore
@ -154,7 +145,6 @@ class WalletStateManager:
name: str = None,
):
self = WalletStateManager()
self.new_wallet = False
self.config = config
self.constants = constants
self.server = server
@ -188,7 +178,6 @@ class WalletStateManager:
self.basic_store = await KeyValStore.create(self.db_wrapper)
self.trade_manager = await TradeManager.create(self, self.db_wrapper)
self.notification_manager = await NotificationManager.create(self, self.db_wrapper)
self.user_settings = await UserSettings.create(self.basic_store)
self.pool_store = await WalletPoolStore.create(self.db_wrapper)
self.dl_store = await DataLayerStore.create(self.db_wrapper)
self.interested_store = await WalletInterestedStore.create(self.db_wrapper)
@ -216,35 +205,36 @@ class WalletStateManager:
wallet = None
for wallet_info in await self.get_all_wallet_info_entries():
if wallet_info.type == WalletType.STANDARD_WALLET:
wallet_type = WalletType(wallet_info.type)
if wallet_type == WalletType.STANDARD_WALLET:
if wallet_info.id == 1:
continue
wallet = await Wallet.create(self, wallet_info)
elif wallet_info.type == WalletType.CAT:
elif wallet_type == WalletType.CAT:
wallet = await CATWallet.create(
self,
self.main_wallet,
wallet_info,
)
elif wallet_info.type == WalletType.DECENTRALIZED_ID:
elif wallet_type == WalletType.DECENTRALIZED_ID:
wallet = await DIDWallet.create(
self,
self.main_wallet,
wallet_info,
)
elif wallet_info.type == WalletType.NFT:
elif wallet_type == WalletType.NFT:
wallet = await NFTWallet.create(
self,
self.main_wallet,
wallet_info,
)
elif wallet_info.type == WalletType.POOLING_WALLET:
elif wallet_type == WalletType.POOLING_WALLET:
wallet = await PoolWallet.create_from_db(
self,
self.main_wallet,
wallet_info,
)
elif wallet_info.type == WalletType.DATA_LAYER:
elif wallet_type == WalletType.DATA_LAYER:
wallet = await DataLayerWallet.create(
self,
self.main_wallet,
@ -255,9 +245,6 @@ class WalletStateManager:
return self
def get_public_key(self, index: uint32) -> G1Element:
return master_sk_to_wallet_sk(self.private_key, index).get_g1()
def get_public_key_unhardened(self, index: uint32) -> G1Element:
return master_sk_to_wallet_sk_unhardened(self.private_key, index).get_g1()
@ -340,7 +327,7 @@ class WalletStateManager:
intermediate_sk = master_sk_to_wallet_sk_intermediate(self.private_key)
intermediate_sk_un = master_sk_to_wallet_sk_unhardened_intermediate(self.private_key)
for index in range(start_index, last_index):
if WalletType(target_wallet.type()) == WalletType.POOLING_WALLET:
if target_wallet.type() == WalletType.POOLING_WALLET:
continue
# Hardened
@ -356,7 +343,7 @@ class WalletStateManager:
uint32(index),
puzzlehash,
pubkey,
WalletType(target_wallet.type()),
target_wallet.type(),
uint32(target_wallet.id()),
True,
)
@ -378,7 +365,7 @@ class WalletStateManager:
uint32(index),
puzzlehash_unhardened,
pubkey_unhardened,
WalletType(target_wallet.type()),
target_wallet.type(),
uint32(target_wallet.id()),
False,
)
@ -474,18 +461,6 @@ class WalletStateManager:
"""
self.pending_tx_callback = callback
def set_coin_with_puzzlehash_created_callback(self, puzzlehash: bytes32, callback: Callable):
"""
Callback to be called when new coin is seen with specified puzzlehash
"""
self.puzzle_hash_created_callbacks[puzzlehash] = callback
async def puzzle_hash_created(self, coin: Coin):
callback = self.puzzle_hash_created_callbacks[coin.puzzle_hash]
if callback is None:
return None
await callback(coin)
def state_changed(self, state: str, wallet_id: Optional[int] = None, data_object: Optional[Dict[str, Any]] = None):
"""
Calls the callback if it's present.
@ -770,7 +745,7 @@ class WalletStateManager:
self, self.main_wallet, bytes(tail_hash).hex()[2:]
)
wallet_id = cat_wallet.id()
wallet_type = WalletType(cat_wallet.type())
wallet_type = cat_wallet.type()
else:
# Found unacknowledged CAT, save it in the database.
await self.interested_store.add_unacknowledged_token(
@ -862,7 +837,7 @@ class WalletStateManager:
assert isinstance(wallet, DIDWallet)
assert wallet.did_info.origin_coin is not None
if origin_coin.name() == wallet.did_info.origin_coin.name():
return wallet.id(), WalletType(wallet.type())
return wallet.id(), wallet.type()
did_wallet = await DIDWallet.create_new_did_wallet_from_coin_spend(
self,
self.main_wallet,
@ -872,7 +847,7 @@ class WalletStateManager:
f"DID {encode_puzzle_hash(launch_id, AddressType.DID.hrp(self.config))}",
)
wallet_id = did_wallet.id()
wallet_type = WalletType(did_wallet.type())
wallet_type = did_wallet.type()
self.state_changed("wallet_created", wallet_id, {"did_id": did_wallet.get_my_DID()})
return wallet_id, wallet_type
@ -1013,7 +988,7 @@ class WalletStateManager:
wallet_type = WalletType.NFT
return wallet_id, wallet_type
async def new_coin_state(
async def add_coin_states(
self,
coin_states: List[CoinState],
peer: WSChiaConnection,
@ -1033,10 +1008,12 @@ class WalletStateManager:
used_up_to = -1
ph_to_index_cache: LRUCache = LRUCache(100)
local_records = await self.coin_store.get_coin_records([st.coin.name() for st in coin_states])
coin_names = [coin_state.coin.name() for coin_state in coin_states]
local_records = await self.coin_store.get_coin_records(coin_names)
for coin_state in coin_states:
local_record = local_records.get(coin_state.coin.name())
for coin_name, coin_state in zip(coin_names, coin_states):
self.log.debug("Add coin state: %s: %s", coin_name, coin_state)
local_record = local_records.get(coin_name)
rollback_wallets = None
try:
async with self.db_wrapper.writer():
@ -1044,12 +1021,9 @@ class WalletStateManager:
# This only succeeds if we don't raise out of the transaction
await self.retry_store.remove_state(coin_state)
existing: Optional[WalletCoinRecord]
coin_name: bytes32 = coin_state.coin.name()
wallet_info: Optional[Tuple[uint32, WalletType]] = await self.get_wallet_id_for_puzzle_hash(
coin_state.coin.puzzle_hash
)
self.log.debug("%s: %s", coin_name, coin_state)
# If we already have this coin, & it was spent & confirmed at the same heights, then return (done)
if local_record is not None:
@ -1076,11 +1050,11 @@ class WalletStateManager:
potential_dl = self.get_dl_wallet()
if potential_dl is not None:
if (
await potential_dl.get_singleton_record(coin_state.coin.name()) is not None
await potential_dl.get_singleton_record(coin_name) is not None
or coin_state.coin.puzzle_hash == MIRROR_PUZZLE_HASH
):
wallet_id = potential_dl.id()
wallet_type = WalletType(potential_dl.type())
wallet_type = potential_dl.type()
if wallet_id is None or wallet_type is None:
self.log.debug(f"No wallet for coin state: {coin_state}")
@ -1094,7 +1068,7 @@ class WalletStateManager:
ph_to_index_cache.put(coin_state.coin.puzzle_hash, derivation_index)
if derivation_index > used_up_to:
await self.puzzle_store.set_used_up_to(derivation_index)
used_up_to = max(used_up_to, derivation_index)
used_up_to = derivation_index
if coin_state.created_height is None:
# TODO implements this coin got reorged
@ -1115,8 +1089,8 @@ class WalletStateManager:
# if the coin has been spent
elif coin_state.created_height is not None and coin_state.spent_height is not None:
self.log.debug("Coin Removed: %s", coin_state)
children: Optional[List[CoinState]] = None
self.log.debug("Coin spent: %s", coin_state)
children = await self.wallet_node.fetch_children(coin_name, peer=peer, fork_height=fork_height)
record = local_record
if record is None:
farmer_reward = False
@ -1175,10 +1149,6 @@ class WalletStateManager:
)
await self.tx_store.add_transaction_record(tx_record)
children = await self.wallet_node.fetch_children(
coin_name, peer=peer, fork_height=fork_height
)
assert children is not None
additions = [state.coin for state in children]
if len(children) > 0:
fee = 0
@ -1309,11 +1279,6 @@ class WalletStateManager:
await nft_wallet.remove_coin(coin_state.coin, uint32(coin_state.spent_height))
# Check if a child is a singleton launcher
if children is None:
children = await self.wallet_node.fetch_children(
coin_name, peer=peer, fork_height=fork_height
)
assert children is not None
for child in children:
if child.coin.puzzle_hash != SINGLETON_LAUNCHER_HASH:
continue
@ -1331,7 +1296,7 @@ class WalletStateManager:
pool_state = solution_to_pool_state(launcher_spend)
assert pool_state is not None
except (AssertionError, ValueError) as e:
self.log.debug(f"Not a pool wallet launcher {e}")
self.log.debug(f"Not a pool wallet launcher {e}, child: {child}")
matched, inner_puzhash = await DataLayerWallet.match_dl_launcher(launcher_spend)
if (
matched
@ -1380,7 +1345,7 @@ class WalletStateManager:
uint32(coin_state.spent_height),
[],
pool_wallet.id(),
WalletType(pool_wallet.type()),
pool_wallet.type(),
peer,
coin_name,
)
@ -1389,7 +1354,7 @@ class WalletStateManager:
else:
raise RuntimeError("All cases already handled") # Logic error, all cases handled
except Exception as e:
self.log.exception(f"Error adding state... {e}")
self.log.exception(f"Failed to add coin_state: {coin_state}, error: {e}")
if rollback_wallets is not None:
self.wallets = rollback_wallets # Restore since DB will be rolled back by writer
if isinstance(e, PeerRequestException) or isinstance(e, aiosqlite.Error):
@ -1447,8 +1412,7 @@ class WalletStateManager:
if wallet_id not in self.wallets.keys():
self.log.warning(f"Do not have wallet {wallet_id} for puzzle_hash {puzzle_hash}")
return None
wallet_type = WalletType(self.wallets[uint32(wallet_id)].type())
return uint32(wallet_id), wallet_type
return uint32(wallet_id), self.wallets[uint32(wallet_id)].type()
return None
async def coin_added(
@ -1472,32 +1436,31 @@ class WalletStateManager:
wallet_id,
wallet_type,
)
farmer_reward = False
pool_reward = False
if self.is_farmer_reward(height, coin):
farmer_reward = True
elif self.is_pool_reward(height, coin):
pool_reward = True
farm_reward = False
parent_coin_record: Optional[WalletCoinRecord] = await self.coin_store.get_coin_record(coin.parent_coin_info)
if parent_coin_record is not None and wallet_type.value == parent_coin_record.wallet_type:
change = True
if self.is_pool_reward(height, coin):
tx_type = TransactionType.COINBASE_REWARD
elif self.is_farmer_reward(height, coin):
tx_type = TransactionType.FEE_REWARD
else:
change = False
tx_type = TransactionType.INCOMING_TX
if farmer_reward or pool_reward:
farm_reward = True
if pool_reward:
tx_type: int = TransactionType.COINBASE_REWARD.value
else:
tx_type = TransactionType.FEE_REWARD.value
timestamp = await self.wallet_node.get_timestamp_for_height(height)
coinbase = tx_type in {TransactionType.FEE_REWARD, TransactionType.COINBASE_REWARD}
coin_confirmed_transaction = False
if not coinbase:
for record in all_unconfirmed_transaction_records:
if coin in record.additions and not record.confirmed:
await self.tx_store.set_confirmed(record.name, height)
coin_confirmed_transaction = True
break
parent_coin_record: Optional[WalletCoinRecord] = await self.coin_store.get_coin_record(coin.parent_coin_info)
change = parent_coin_record is not None and wallet_type.value == parent_coin_record.wallet_type
if coinbase or not coin_confirmed_transaction and not change:
tx_record = TransactionRecord(
confirmed_at_height=uint32(height),
created_at_time=timestamp,
to_puzzle_hash=(await self.convert_puzzle_hash(wallet_id, coin.puzzle_hash)),
created_at_time=await self.wallet_node.get_timestamp_for_height(height),
to_puzzle_hash=await self.convert_puzzle_hash(wallet_id, coin.puzzle_hash),
amount=uint64(coin.amount),
fee_amount=uint64(0),
confirmed=True,
@ -1512,45 +1475,13 @@ class WalletStateManager:
name=coin_name,
memos=[],
)
await self.tx_store.add_transaction_record(tx_record)
else:
records: List[TransactionRecord] = []
for record in all_unconfirmed_transaction_records:
for add_coin in record.additions:
if add_coin == coin:
records.append(record)
if tx_record.amount > 0:
await self.tx_store.add_transaction_record(tx_record)
if len(records) > 0:
for record in records:
if record.confirmed is False:
await self.tx_store.set_confirmed(record.name, height)
elif not change:
timestamp = await self.wallet_node.get_timestamp_for_height(height)
tx_record = TransactionRecord(
confirmed_at_height=uint32(height),
created_at_time=timestamp,
to_puzzle_hash=(await self.convert_puzzle_hash(wallet_id, coin.puzzle_hash)),
amount=uint64(coin.amount),
fee_amount=uint64(0),
confirmed=True,
sent=uint32(0),
spend_bundle=None,
additions=[coin],
removals=[],
wallet_id=wallet_id,
sent_to=[],
trade_id=None,
type=uint32(TransactionType.INCOMING_TX.value),
name=coin_name,
memos=[],
)
if coin.amount > 0:
await self.tx_store.add_transaction_record(tx_record)
coin_record_1: WalletCoinRecord = WalletCoinRecord(
coin, height, uint32(0), False, farm_reward, wallet_type, wallet_id
coin_record: WalletCoinRecord = WalletCoinRecord(
coin, height, uint32(0), False, coinbase, wallet_type, wallet_id
)
await self.coin_store.add_coin_record(coin_record_1, coin_name)
await self.coin_store.add_coin_record(coin_record, coin_name)
await self.wallets[wallet_id].coin_added(coin, height, peer)
@ -1663,14 +1594,6 @@ class WalletStateManager:
records = await self.coin_store.get_coin_records(**kwargs)
return [await self.get_coin_record_by_wallet_record(record) for record in records.values()]
async def is_addition_relevant(self, addition: Coin):
"""
Check whether we care about a new addition (puzzle_hash). Returns true if we
control this puzzle hash.
"""
result = await self.puzzle_store.puzzle_hash_exists(addition.puzzle_hash)
return result
async def get_wallet_for_coin(self, coin_id: bytes32) -> Optional[WalletProtocol]:
coin_record = await self.coin_store.get_coin_record(coin_id)
if coin_record is None:
@ -1753,10 +1676,9 @@ class WalletStateManager:
name,
)
async def add_new_wallet(self, wallet: WalletProtocol, wallet_id: int, create_puzzle_hashes: bool = True) -> None:
self.wallets[uint32(wallet_id)] = wallet
if create_puzzle_hashes:
await self.create_more_puzzle_hashes()
async def add_new_wallet(self, wallet: WalletProtocol) -> None:
self.wallets[wallet.id()] = wallet
await self.create_more_puzzle_hashes()
self.state_changed("wallet_created")
async def get_spendable_coins_for_wallet(
@ -1789,7 +1711,7 @@ class WalletStateManager:
async def new_peak(self, peak: wallet_protocol.NewPeakWallet):
for wallet_id, wallet in self.wallets.items():
if wallet.type() == uint8(WalletType.POOLING_WALLET):
if wallet.type() == WalletType.POOLING_WALLET:
assert isinstance(wallet, PoolWallet)
await wallet.new_peak(uint64(peak.height))
current_time = int(time.time())

View File

@ -18,7 +18,7 @@ from chia.util.setproctitle import getproctitle, setproctitle
log = logging.getLogger(__name__)
def _create_shutdown_file() -> IO:
def _create_shutdown_file() -> IO[bytes]:
return tempfile.NamedTemporaryFile(prefix="chia_wallet_weight_proof_handler_executor_shutdown_trigger")
@ -30,16 +30,16 @@ class WalletWeightProofHandler:
):
self._constants = constants
self._num_processes = 4
self._executor_shutdown_tempfile: IO = _create_shutdown_file()
self._executor_shutdown_tempfile: IO[bytes] = _create_shutdown_file()
self._executor: ProcessPoolExecutor = ProcessPoolExecutor(
self._num_processes,
mp_context=multiprocessing_context,
initializer=setproctitle,
initargs=(f"{getproctitle()}_worker",),
)
self._weight_proof_tasks: List[asyncio.Task] = []
self._weight_proof_tasks: List[asyncio.Task[Tuple[bool, List[BlockRecord]]]] = []
def cancel_weight_proof_tasks(self):
def cancel_weight_proof_tasks(self) -> None:
for task in self._weight_proof_tasks:
if not task.done():
task.cancel()
@ -56,7 +56,7 @@ class WalletWeightProofHandler:
log.error("weight proof failed sub epoch data validation")
return False, [], []
validate_from = get_fork_ses_idx(old_proof, weight_proof)
task: asyncio.Task = asyncio.create_task(
task = asyncio.create_task(
validate_weight_proof_inner(
self._constants,
self._executor,

File diff suppressed because one or more lines are too long

View File

@ -14,7 +14,7 @@ dependencies = [
"chiapos==1.0.11", # proof of space
"clvm==0.9.7",
"clvm_tools==0.4.6", # Currying, Program.to, other conveniences
"chia_rs==0.2.4",
"chia_rs==0.2.5",
"clvm-tools-rs==0.1.30", # Rust implementation of clvm_tools' compiler
"aiohttp==3.8.4", # HTTP server for full node rpc
"aiosqlite==0.17.0", # asyncio wrapper for sqlite, to store blocks

View File

@ -1,6 +1,6 @@
from typing import Optional, List
from chia.consensus.blockchain import Blockchain, ReceiveBlockResult
from chia.consensus.blockchain import Blockchain, AddBlockResult
from chia.consensus.multiprocess_validation import PreValidationResult
from chia.types.full_block import FullBlock
from chia.util.errors import Err
@ -39,7 +39,7 @@ async def check_block_store_invariant(bc: Blockchain):
async def _validate_and_add_block(
blockchain: Blockchain,
block: FullBlock,
expected_result: Optional[ReceiveBlockResult] = None,
expected_result: Optional[AddBlockResult] = None,
expected_error: Optional[Err] = None,
skip_prevalidation: bool = False,
fork_point_with_peak: Optional[uint32] = None,
@ -48,7 +48,7 @@ async def _validate_and_add_block(
# block is added to the peak.
# If expected_result is not None, that result will be enforced.
# If expected_error is not None, that error will be enforced. If expected_error is not None,
# receive_block must return Err.INVALID_BLOCK.
# add_block must return Err.INVALID_BLOCK.
# If expected_result == INVALID_BLOCK but expected_error is None, we will allow for errors to happen
await check_block_store_invariant(blockchain)
@ -62,7 +62,7 @@ async def _validate_and_add_block(
assert pre_validation_results is not None
results = pre_validation_results[0]
if results.error is not None:
if expected_result == ReceiveBlockResult.INVALID_BLOCK and expected_error is None:
if expected_result == AddBlockResult.INVALID_BLOCK and expected_error is None:
# We expected an error but didn't specify which one
await check_block_store_invariant(blockchain)
return None
@ -79,10 +79,10 @@ async def _validate_and_add_block(
result,
err,
_,
) = await blockchain.receive_block(block, results, fork_point_with_peak=fork_point_with_peak)
) = await blockchain.add_block(block, results, fork_point_with_peak=fork_point_with_peak)
await check_block_store_invariant(blockchain)
if expected_error is None and expected_result != ReceiveBlockResult.INVALID_BLOCK:
if expected_error is None and expected_result != AddBlockResult.INVALID_BLOCK:
# Expecting an error here (but didn't specify which), let's check if we actually got an error
if err is not None:
# Got an error
@ -97,10 +97,10 @@ async def _validate_and_add_block(
raise AssertionError(f"Expected {expected_result} but got {result}")
elif expected_result is None:
# If we expected an error assume that expected_result = INVALID_BLOCK
if expected_error is not None and result != ReceiveBlockResult.INVALID_BLOCK:
if expected_error is not None and result != AddBlockResult.INVALID_BLOCK:
raise AssertionError(f"Block should be invalid, but received: {result}")
# Otherwise, assume that expected_result = NEW_PEAK
if expected_error is None and result != ReceiveBlockResult.NEW_PEAK:
if expected_error is None and result != AddBlockResult.NEW_PEAK:
raise AssertionError(f"Block was not added: {result}")
@ -121,7 +121,7 @@ async def _validate_and_add_block_multi_error(
async def _validate_and_add_block_multi_result(
blockchain: Blockchain,
block: FullBlock,
expected_result: List[ReceiveBlockResult],
expected_result: List[AddBlockResult],
skip_prevalidation: Optional[bool] = None,
) -> None:
try:
@ -146,9 +146,9 @@ async def _validate_and_add_block_no_error(
blockchain,
block,
expected_result=[
ReceiveBlockResult.ALREADY_HAVE_BLOCK,
ReceiveBlockResult.NEW_PEAK,
ReceiveBlockResult.ADDED_AS_ORPHAN,
AddBlockResult.ALREADY_HAVE_BLOCK,
AddBlockResult.NEW_PEAK,
AddBlockResult.ADDED_AS_ORPHAN,
],
skip_prevalidation=skip_prevalidation,
)

View File

@ -14,7 +14,7 @@ from clvm.casts import int_to_bytes
from chia.consensus.block_header_validation import validate_finished_header_block
from chia.consensus.block_rewards import calculate_base_farmer_reward
from chia.consensus.blockchain import ReceiveBlockResult
from chia.consensus.blockchain import AddBlockResult
from chia.consensus.coinbase import create_farmer_coin
from chia.consensus.constants import ConsensusConstants
from chia.consensus.multiprocess_validation import PreValidationResult
@ -160,9 +160,7 @@ class TestBlockHeaderValidation:
assert error.code == Err.INVALID_NEW_SUB_SLOT_ITERS
# Also fails calling the outer methods, but potentially with a different error
await _validate_and_add_block(
empty_blockchain, block_bad, expected_result=ReceiveBlockResult.INVALID_BLOCK
)
await _validate_and_add_block(empty_blockchain, block_bad, expected_result=AddBlockResult.INVALID_BLOCK)
new_finished_ss_2 = recursive_replace(
block.finished_sub_slots[0],
@ -186,7 +184,7 @@ class TestBlockHeaderValidation:
# Also fails calling the outer methods, but potentially with a different error
await _validate_and_add_block(
empty_blockchain, block_bad_2, expected_result=ReceiveBlockResult.INVALID_BLOCK
empty_blockchain, block_bad_2, expected_result=AddBlockResult.INVALID_BLOCK
)
# 3c
@ -218,7 +216,7 @@ class TestBlockHeaderValidation:
# Also fails calling the outer methods, but potentially with a different error
await _validate_and_add_block(
empty_blockchain, block_bad_3, expected_result=ReceiveBlockResult.INVALID_BLOCK
empty_blockchain, block_bad_3, expected_result=AddBlockResult.INVALID_BLOCK
)
# 3d
@ -249,7 +247,7 @@ class TestBlockHeaderValidation:
# Also fails calling the outer methods, but potentially with a different error
await _validate_and_add_block(
empty_blockchain, block_bad_4, expected_result=ReceiveBlockResult.INVALID_BLOCK
empty_blockchain, block_bad_4, expected_result=AddBlockResult.INVALID_BLOCK
)
await _validate_and_add_block(empty_blockchain, block)
log.info(
@ -471,7 +469,7 @@ class TestBlockHeaderValidation:
)
assert error.code == Err.INVALID_PREV_CHALLENGE_SLOT_HASH
await _validate_and_add_block(empty_blockchain, block_0_bad, expected_result=ReceiveBlockResult.INVALID_BLOCK)
await _validate_and_add_block(empty_blockchain, block_0_bad, expected_result=AddBlockResult.INVALID_BLOCK)
@pytest.mark.asyncio
async def test_invalid_sub_slot_challenge_hash_non_genesis(self, empty_blockchain, bt):
@ -498,7 +496,7 @@ class TestBlockHeaderValidation:
blocks[1].finished_sub_slots[0].challenge_chain.new_sub_slot_iters,
)
assert error.code == Err.INVALID_PREV_CHALLENGE_SLOT_HASH
await _validate_and_add_block(empty_blockchain, block_1_bad, expected_result=ReceiveBlockResult.INVALID_BLOCK)
await _validate_and_add_block(empty_blockchain, block_1_bad, expected_result=AddBlockResult.INVALID_BLOCK)
@pytest.mark.asyncio
async def test_invalid_sub_slot_challenge_hash_empty_ss(self, empty_blockchain, bt):
@ -525,7 +523,7 @@ class TestBlockHeaderValidation:
blocks[1].finished_sub_slots[0].challenge_chain.new_sub_slot_iters,
)
assert error.code == Err.INVALID_PREV_CHALLENGE_SLOT_HASH
await _validate_and_add_block(empty_blockchain, block_1_bad, expected_result=ReceiveBlockResult.INVALID_BLOCK)
await _validate_and_add_block(empty_blockchain, block_1_bad, expected_result=AddBlockResult.INVALID_BLOCK)
@pytest.mark.asyncio
async def test_genesis_no_icc(self, empty_blockchain, bt):
@ -681,7 +679,7 @@ class TestBlockHeaderValidation:
block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters,
)
assert error.code == Err.INVALID_ICC_HASH_CC
await _validate_and_add_block(blockchain, block_bad, expected_result=ReceiveBlockResult.INVALID_BLOCK)
await _validate_and_add_block(blockchain, block_bad, expected_result=AddBlockResult.INVALID_BLOCK)
# 2i
new_finished_ss_bad_rc = recursive_replace(
@ -751,7 +749,7 @@ class TestBlockHeaderValidation:
empty_blockchain.constants.SUB_SLOT_ITERS_STARTING,
)
assert error.code == Err.INVALID_SUB_EPOCH_SUMMARY_HASH
await _validate_and_add_block(blockchain, block_bad, expected_result=ReceiveBlockResult.INVALID_BLOCK)
await _validate_and_add_block(blockchain, block_bad, expected_result=AddBlockResult.INVALID_BLOCK)
@pytest.mark.asyncio
async def test_empty_sub_slots_epoch(self, empty_blockchain, default_400_blocks, bt):
@ -765,10 +763,10 @@ class TestBlockHeaderValidation:
for block in blocks_base:
await _validate_and_add_block(empty_blockchain, block, skip_prevalidation=True)
await _validate_and_add_block(
empty_blockchain, blocks_1[-1], expected_result=ReceiveBlockResult.NEW_PEAK, skip_prevalidation=True
empty_blockchain, blocks_1[-1], expected_result=AddBlockResult.NEW_PEAK, skip_prevalidation=True
)
await _validate_and_add_block(
empty_blockchain, blocks_2[-1], expected_result=ReceiveBlockResult.ADDED_AS_ORPHAN, skip_prevalidation=True
empty_blockchain, blocks_2[-1], expected_result=AddBlockResult.ADDED_AS_ORPHAN, skip_prevalidation=True
)
@pytest.mark.asyncio
@ -1233,25 +1231,19 @@ class TestBlockHeaderValidation:
block_bad = recursive_replace(
blocks[-1], "reward_chain_block.challenge_chain_sp_vdf.challenge", std_hash(b"1")
)
await _validate_and_add_block(
empty_blockchain, block_bad, expected_result=ReceiveBlockResult.INVALID_BLOCK
)
await _validate_and_add_block(empty_blockchain, block_bad, expected_result=AddBlockResult.INVALID_BLOCK)
block_bad = recursive_replace(
blocks[-1],
"reward_chain_block.challenge_chain_sp_vdf.output",
bad_element,
)
await _validate_and_add_block(
empty_blockchain, block_bad, expected_result=ReceiveBlockResult.INVALID_BLOCK
)
await _validate_and_add_block(empty_blockchain, block_bad, expected_result=AddBlockResult.INVALID_BLOCK)
block_bad = recursive_replace(
blocks[-1],
"reward_chain_block.challenge_chain_sp_vdf.number_of_iterations",
uint64(1111111111111),
)
await _validate_and_add_block(
empty_blockchain, block_bad, expected_result=ReceiveBlockResult.INVALID_BLOCK
)
await _validate_and_add_block(empty_blockchain, block_bad, expected_result=AddBlockResult.INVALID_BLOCK)
block_bad = recursive_replace(
blocks[-1],
"challenge_chain_sp_proof",
@ -1697,7 +1689,7 @@ class TestBlockHeaderValidation:
co = ConditionOpcode
rbr = ReceiveBlockResult
rbr = AddBlockResult
class TestPreValidation:
@ -1737,11 +1729,11 @@ class TestPreValidation:
assert res[n].error is None
block = blocks_to_validate[n]
start_rb = time.time()
result, err, _ = await empty_blockchain.receive_block(block, res[n])
result, err, _ = await empty_blockchain.add_block(block, res[n])
end_rb = time.time()
times_rb.append(end_rb - start_rb)
assert err is None
assert result == ReceiveBlockResult.NEW_PEAK
assert result == AddBlockResult.NEW_PEAK
log.info(
f"Added block {block.height} total iters {block.total_iters} "
f"new slot? {len(block.finished_sub_slots)}, time {end_rb - start_rb}"
@ -1826,18 +1818,162 @@ class TestBodyValidation:
)
# Ignore errors from pre-validation, we are testing block_body_validation
repl_preval_results = replace(pre_validation_results[0], error=None, required_iters=uint64(1))
code, err, state_change = await b.receive_block(blocks[-1], repl_preval_results)
assert code == ReceiveBlockResult.NEW_PEAK
code, err, state_change = await b.add_block(blocks[-1], repl_preval_results)
assert code == AddBlockResult.NEW_PEAK
assert err is None
assert state_change.fork_height == 2
@pytest.mark.asyncio
@pytest.mark.parametrize("with_softfork2", [False, True])
@pytest.mark.parametrize(
"opcode,lock_value,expected",
[
# the 3 blocks, starting at timestamp 10000 (and height 0).
# each block is 10 seconds apart.
# the 4th block (height 3, time 10030) spends a coin with the condition specified
# by the test case. The coin was born in height 2 at time 10020
# MY BIRHT HEIGHT
(co.ASSERT_MY_BIRTH_HEIGHT, -1, rbr.INVALID_BLOCK),
(co.ASSERT_MY_BIRTH_HEIGHT, 0x100000000, rbr.INVALID_BLOCK),
(co.ASSERT_MY_BIRTH_HEIGHT, 2, rbr.NEW_PEAK), # <- coin birth height
(co.ASSERT_MY_BIRTH_HEIGHT, 3, rbr.INVALID_BLOCK),
# MY BIRHT SECONDS
(co.ASSERT_MY_BIRTH_SECONDS, -1, rbr.INVALID_BLOCK),
(co.ASSERT_MY_BIRTH_SECONDS, 0x10000000000000000, rbr.INVALID_BLOCK),
(co.ASSERT_MY_BIRTH_SECONDS, 10019, rbr.INVALID_BLOCK),
(co.ASSERT_MY_BIRTH_SECONDS, 10020, rbr.NEW_PEAK), # <- coin birth time
(co.ASSERT_MY_BIRTH_SECONDS, 10021, rbr.INVALID_BLOCK),
# SECONDS RELATIVE
(co.ASSERT_SECONDS_RELATIVE, -2, rbr.NEW_PEAK),
(co.ASSERT_SECONDS_RELATIVE, -1, rbr.NEW_PEAK),
(co.ASSERT_SECONDS_RELATIVE, 0, rbr.NEW_PEAK), # <- birth time
(co.ASSERT_SECONDS_RELATIVE, 1, rbr.INVALID_BLOCK),
(co.ASSERT_SECONDS_RELATIVE, 9, rbr.INVALID_BLOCK),
(co.ASSERT_SECONDS_RELATIVE, 10, rbr.INVALID_BLOCK), # <- current block time
(co.ASSERT_SECONDS_RELATIVE, 11, rbr.INVALID_BLOCK),
# BEFORE SECONDS RELATIVE
(co.ASSERT_BEFORE_SECONDS_RELATIVE, -2, rbr.INVALID_BLOCK),
(co.ASSERT_BEFORE_SECONDS_RELATIVE, -1, rbr.INVALID_BLOCK),
(co.ASSERT_BEFORE_SECONDS_RELATIVE, 0, rbr.INVALID_BLOCK), # <- birth time
(co.ASSERT_BEFORE_SECONDS_RELATIVE, 1, rbr.NEW_PEAK),
(co.ASSERT_BEFORE_SECONDS_RELATIVE, 9, rbr.NEW_PEAK),
(co.ASSERT_BEFORE_SECONDS_RELATIVE, 10, rbr.NEW_PEAK), # <- current block time
(co.ASSERT_BEFORE_SECONDS_RELATIVE, 11, rbr.NEW_PEAK),
# HEIGHT RELATIVE
(co.ASSERT_HEIGHT_RELATIVE, -2, rbr.NEW_PEAK),
(co.ASSERT_HEIGHT_RELATIVE, -1, rbr.NEW_PEAK),
(co.ASSERT_HEIGHT_RELATIVE, 0, rbr.NEW_PEAK),
(co.ASSERT_HEIGHT_RELATIVE, 1, rbr.INVALID_BLOCK),
# BEFORE HEIGHT RELATIVE
(co.ASSERT_BEFORE_HEIGHT_RELATIVE, -2, rbr.INVALID_BLOCK),
(co.ASSERT_BEFORE_HEIGHT_RELATIVE, -1, rbr.INVALID_BLOCK),
(co.ASSERT_BEFORE_HEIGHT_RELATIVE, 0, rbr.INVALID_BLOCK),
(co.ASSERT_BEFORE_HEIGHT_RELATIVE, 1, rbr.NEW_PEAK),
# HEIGHT ABSOLUTE
(co.ASSERT_HEIGHT_ABSOLUTE, 1, rbr.NEW_PEAK),
(co.ASSERT_HEIGHT_ABSOLUTE, 2, rbr.NEW_PEAK),
(co.ASSERT_HEIGHT_ABSOLUTE, 3, rbr.INVALID_BLOCK),
(co.ASSERT_HEIGHT_ABSOLUTE, 4, rbr.INVALID_BLOCK),
# BEFORE HEIGHT ABSOLUTE
(co.ASSERT_BEFORE_HEIGHT_ABSOLUTE, 1, rbr.INVALID_BLOCK),
(co.ASSERT_BEFORE_HEIGHT_ABSOLUTE, 2, rbr.INVALID_BLOCK),
(co.ASSERT_BEFORE_HEIGHT_ABSOLUTE, 3, rbr.NEW_PEAK),
(co.ASSERT_BEFORE_HEIGHT_ABSOLUTE, 4, rbr.NEW_PEAK),
# SECONDS ABSOLUTE
# genesis timestamp is 10000 and each block is 10 seconds
(co.ASSERT_SECONDS_ABSOLUTE, 10019, rbr.NEW_PEAK),
(co.ASSERT_SECONDS_ABSOLUTE, 10020, rbr.NEW_PEAK), # <- previous tx-block
(co.ASSERT_SECONDS_ABSOLUTE, 10021, rbr.INVALID_BLOCK),
(co.ASSERT_SECONDS_ABSOLUTE, 10029, rbr.INVALID_BLOCK),
(co.ASSERT_SECONDS_ABSOLUTE, 10030, rbr.INVALID_BLOCK), # <- current block
(co.ASSERT_SECONDS_ABSOLUTE, 10031, rbr.INVALID_BLOCK),
(co.ASSERT_SECONDS_ABSOLUTE, 10032, rbr.INVALID_BLOCK),
# BEFORE SECONDS ABSOLUTE
(co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10019, rbr.INVALID_BLOCK),
(co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10020, rbr.INVALID_BLOCK), # <- previous tx-block
(co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10021, rbr.NEW_PEAK),
(co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10029, rbr.NEW_PEAK),
(co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10030, rbr.NEW_PEAK), # <- current block
(co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10031, rbr.NEW_PEAK),
(co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10032, rbr.NEW_PEAK),
],
)
async def test_timelock_conditions(self, opcode, lock_value, expected, with_softfork2, bt):
if with_softfork2:
# enable softfork2 at height 0, to make it apply to this test
constants = test_constants.replace(SOFT_FORK2_HEIGHT=0)
else:
constants = test_constants
# if the softfork is not active in this test, fixup all the
# tests to instead expect NEW_PEAK unconditionally
if opcode in [
ConditionOpcode.ASSERT_MY_BIRTH_HEIGHT,
ConditionOpcode.ASSERT_MY_BIRTH_SECONDS,
ConditionOpcode.ASSERT_BEFORE_SECONDS_RELATIVE,
ConditionOpcode.ASSERT_BEFORE_SECONDS_ABSOLUTE,
ConditionOpcode.ASSERT_BEFORE_HEIGHT_RELATIVE,
ConditionOpcode.ASSERT_BEFORE_HEIGHT_ABSOLUTE,
]:
expected = AddBlockResult.NEW_PEAK
# before soft-fork 2, the timestamp we compared against was the
# current block's timestamp as opposed to the previous tx-block's
# timestamp. These conditions used to be valid, before the soft-fork
if opcode == ConditionOpcode.ASSERT_SECONDS_RELATIVE and lock_value > 0 and lock_value <= 10:
expected = AddBlockResult.NEW_PEAK
if opcode == ConditionOpcode.ASSERT_SECONDS_ABSOLUTE and lock_value > 10020 and lock_value <= 10030:
expected = AddBlockResult.NEW_PEAK
async with make_empty_blockchain(constants) as b:
blocks = bt.get_consecutive_blocks(
3,
guarantee_transaction_block=True,
farmer_reward_puzzle_hash=bt.pool_ph,
pool_reward_puzzle_hash=bt.pool_ph,
genesis_timestamp=10000,
time_per_block=10,
)
for bl in blocks:
await _validate_and_add_block(b, bl)
wt: WalletTool = bt.get_pool_wallet_tool()
conditions = {opcode: [ConditionWithArgs(opcode, [int_to_bytes(lock_value)])]}
coin = list(blocks[-1].get_included_reward_coins())[0]
tx: SpendBundle = wt.generate_signed_transaction(
10, wt.get_new_puzzlehash(), coin, condition_dic=conditions
)
blocks = bt.get_consecutive_blocks(
1,
block_list_input=blocks,
guarantee_transaction_block=True,
transaction_data=tx,
time_per_block=10,
)
pre_validation_results: List[PreValidationResult] = await b.pre_validate_blocks_multiprocessing(
[blocks[-1]], {}, validate_signatures=True
)
assert pre_validation_results is not None
assert (await b.add_block(blocks[-1], pre_validation_results[0]))[0] == expected
if expected == AddBlockResult.NEW_PEAK:
# ensure coin was in fact spent
c = await b.coin_store.get_coin_record(coin.name())
assert c is not None and c.spent
@pytest.mark.asyncio
@pytest.mark.parametrize("opcode", [ConditionOpcode.AGG_SIG_ME, ConditionOpcode.AGG_SIG_UNSAFE])
@pytest.mark.parametrize(
"with_garbage,expected",
[
(True, (ReceiveBlockResult.INVALID_BLOCK, Err.INVALID_CONDITION, None)),
(False, (ReceiveBlockResult.NEW_PEAK, None, 2)),
(True, (AddBlockResult.INVALID_BLOCK, Err.INVALID_CONDITION, None)),
(False, (AddBlockResult.NEW_PEAK, None, 2)),
],
)
async def test_aggsig_garbage(self, empty_blockchain, opcode, with_garbage, expected, bt):
@ -1884,12 +2020,11 @@ class TestBodyValidation:
)
# Ignore errors from pre-validation, we are testing block_body_validation
repl_preval_results = replace(pre_validation_results[0], error=None, required_iters=uint64(1))
res, error, state_change = await b.receive_block(blocks[-1], repl_preval_results)
res, error, state_change = await b.add_block(blocks[-1], repl_preval_results)
assert (res, error, state_change.fork_height if state_change else None) == expected
@pytest.mark.asyncio
# soft-fork 2 is disabled (for now)
@pytest.mark.parametrize("with_softfork2", [False])
@pytest.mark.parametrize("with_softfork2", [False, True])
@pytest.mark.parametrize("with_garbage", [True, False])
@pytest.mark.parametrize(
"opcode,lock_value,expected",
@ -1906,31 +2041,70 @@ class TestBodyValidation:
(co.ASSERT_MY_BIRTH_SECONDS, 10030, rbr.NEW_PEAK),
(co.ASSERT_MY_BIRTH_SECONDS, 10031, rbr.INVALID_BLOCK),
# SECONDS RELATIVE
# genesis timestamp is 10000 and each block is 10 seconds
(co.ASSERT_SECONDS_RELATIVE, -2, rbr.NEW_PEAK),
(co.ASSERT_SECONDS_RELATIVE, -1, rbr.NEW_PEAK),
(co.ASSERT_SECONDS_RELATIVE, 0, rbr.NEW_PEAK),
(co.ASSERT_SECONDS_RELATIVE, 0, rbr.INVALID_BLOCK),
(co.ASSERT_SECONDS_RELATIVE, 1, rbr.INVALID_BLOCK),
# BEFORE SECONDS RELATIVE
# relative conditions are not allowed on ephemeral spends
(co.ASSERT_BEFORE_SECONDS_RELATIVE, -2, rbr.INVALID_BLOCK),
(co.ASSERT_BEFORE_SECONDS_RELATIVE, -1, rbr.INVALID_BLOCK),
(co.ASSERT_BEFORE_SECONDS_RELATIVE, 0, rbr.INVALID_BLOCK),
(co.ASSERT_BEFORE_SECONDS_RELATIVE, 10, rbr.INVALID_BLOCK),
(co.ASSERT_BEFORE_SECONDS_RELATIVE, 0x10000000000000000, rbr.INVALID_BLOCK),
# HEIGHT RELATIVE
(co.ASSERT_HEIGHT_RELATIVE, -2, rbr.NEW_PEAK),
(co.ASSERT_HEIGHT_RELATIVE, -1, rbr.NEW_PEAK),
(co.ASSERT_HEIGHT_RELATIVE, 0, rbr.INVALID_BLOCK),
(co.ASSERT_HEIGHT_RELATIVE, 1, rbr.INVALID_BLOCK),
# BEFORE HEIGHT RELATIVE
# relative conditions are not allowed on ephemeral spends
(co.ASSERT_BEFORE_HEIGHT_RELATIVE, -2, rbr.INVALID_BLOCK),
(co.ASSERT_BEFORE_HEIGHT_RELATIVE, -1, rbr.INVALID_BLOCK),
(co.ASSERT_BEFORE_HEIGHT_RELATIVE, 0, rbr.INVALID_BLOCK),
(co.ASSERT_BEFORE_HEIGHT_RELATIVE, 1, rbr.INVALID_BLOCK),
(co.ASSERT_BEFORE_HEIGHT_RELATIVE, 0x100000000, rbr.INVALID_BLOCK),
# HEIGHT ABSOLUTE
(co.ASSERT_HEIGHT_ABSOLUTE, 2, rbr.NEW_PEAK),
(co.ASSERT_HEIGHT_ABSOLUTE, 3, rbr.INVALID_BLOCK),
(co.ASSERT_HEIGHT_ABSOLUTE, 4, rbr.INVALID_BLOCK),
# BEFORE HEIGHT ABSOLUTE
(co.ASSERT_BEFORE_HEIGHT_ABSOLUTE, 2, rbr.INVALID_BLOCK),
(co.ASSERT_BEFORE_HEIGHT_ABSOLUTE, 3, rbr.NEW_PEAK),
(co.ASSERT_BEFORE_HEIGHT_ABSOLUTE, 4, rbr.NEW_PEAK),
# SECONDS ABSOLUTE
# genesis timestamp is 10000 and each block is 10 seconds
(co.ASSERT_SECONDS_ABSOLUTE, 10029, rbr.NEW_PEAK),
(co.ASSERT_SECONDS_ABSOLUTE, 10030, rbr.NEW_PEAK),
(co.ASSERT_SECONDS_ABSOLUTE, 10020, rbr.NEW_PEAK), # <- previous tx-block
(co.ASSERT_SECONDS_ABSOLUTE, 10021, rbr.INVALID_BLOCK),
(co.ASSERT_SECONDS_ABSOLUTE, 10029, rbr.INVALID_BLOCK),
(co.ASSERT_SECONDS_ABSOLUTE, 10030, rbr.INVALID_BLOCK), # <- current tx-block
(co.ASSERT_SECONDS_ABSOLUTE, 10031, rbr.INVALID_BLOCK),
(co.ASSERT_SECONDS_ABSOLUTE, 10032, rbr.INVALID_BLOCK),
# BEFORE SECONDS ABSOLUTE
(co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10020, rbr.INVALID_BLOCK),
(co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10021, rbr.NEW_PEAK),
(co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10030, rbr.NEW_PEAK),
(co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10031, rbr.NEW_PEAK),
(co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10032, rbr.NEW_PEAK),
],
)
async def test_ephemeral_timelock(self, opcode, lock_value, expected, with_garbage, with_softfork2, bt):
if with_softfork2:
# enable softfork2 at height 0, to make it apply to this test
constants = test_constants.replace(SOFT_FORK2_HEIGHT=0)
# after the softfork, we don't allow any birth assertions, not
# relative time locks on ephemeral coins. This test is only for
# ephemeral coins, so these cases should always fail
if opcode in [
ConditionOpcode.ASSERT_MY_BIRTH_HEIGHT,
ConditionOpcode.ASSERT_MY_BIRTH_SECONDS,
ConditionOpcode.ASSERT_SECONDS_RELATIVE,
ConditionOpcode.ASSERT_HEIGHT_RELATIVE,
]:
expected = AddBlockResult.INVALID_BLOCK
else:
constants = test_constants
@ -1939,8 +2113,21 @@ class TestBodyValidation:
if opcode in [
ConditionOpcode.ASSERT_MY_BIRTH_HEIGHT,
ConditionOpcode.ASSERT_MY_BIRTH_SECONDS,
ConditionOpcode.ASSERT_BEFORE_HEIGHT_RELATIVE,
ConditionOpcode.ASSERT_BEFORE_HEIGHT_ABSOLUTE,
ConditionOpcode.ASSERT_BEFORE_SECONDS_RELATIVE,
ConditionOpcode.ASSERT_BEFORE_SECONDS_ABSOLUTE,
]:
expected = ReceiveBlockResult.NEW_PEAK
expected = AddBlockResult.NEW_PEAK
# before the softfork, we compared ASSERT_SECONDS_* conditions
# against the current block's timestamp, so we need to
# adjust these test cases
if opcode == co.ASSERT_SECONDS_ABSOLUTE and lock_value > 10020 and lock_value <= 10030:
expected = rbr.NEW_PEAK
if opcode == co.ASSERT_SECONDS_RELATIVE and lock_value > -10 and lock_value <= 0:
expected = rbr.NEW_PEAK
async with make_empty_blockchain(constants) as b:
@ -1985,9 +2172,9 @@ class TestBodyValidation:
[blocks[-1]], {}, validate_signatures=True
)
assert pre_validation_results is not None
assert (await b.receive_block(blocks[-1], pre_validation_results[0]))[0] == expected
assert (await b.add_block(blocks[-1], pre_validation_results[0]))[0] == expected
if expected == ReceiveBlockResult.NEW_PEAK:
if expected == AddBlockResult.NEW_PEAK:
# ensure coin1 was in fact spent
c = await b.coin_store.get_coin_record(coin1.name())
assert c is not None and c.spent
@ -2291,7 +2478,7 @@ class TestBodyValidation:
block_2 = recursive_replace(block, "transactions_generator_ref_list", [block.height - 2, block.height - 1])
# Fails preval
await _validate_and_add_block(b, block_2, expected_error=Err.FAILED_GETTING_GENERATOR_MULTIPROCESSING)
# Fails receive_block
# Fails add_block
await _validate_and_add_block_multi_error(
b,
block_2,
@ -2341,13 +2528,9 @@ class TestBodyValidation:
block_generator: BlockGenerator = BlockGenerator(blocks[-1].transactions_generator, [], [])
npc_result = get_name_puzzle_conditions(
block_generator,
b.constants.MAX_BLOCK_COST_CLVM * 1000,
cost_per_byte=b.constants.COST_PER_BYTE,
mempool_mode=False,
height=softfork_height,
block_generator, b.constants.MAX_BLOCK_COST_CLVM * 1000, mempool_mode=False, height=softfork_height
)
err = (await b.receive_block(blocks[-1], PreValidationResult(None, uint64(1), npc_result, True)))[1]
err = (await b.add_block(blocks[-1], PreValidationResult(None, uint64(1), npc_result, True)))[1]
assert err in [Err.BLOCK_COST_EXCEEDS_MAX]
results: List[PreValidationResult] = await b.pre_validate_blocks_multiprocessing(
@ -2404,11 +2587,10 @@ class TestBodyValidation:
npc_result = get_name_puzzle_conditions(
block_generator,
min(b.constants.MAX_BLOCK_COST_CLVM * 1000, block.transactions_info.cost),
cost_per_byte=b.constants.COST_PER_BYTE,
mempool_mode=False,
height=softfork_height,
)
result, err, _ = await b.receive_block(block_2, PreValidationResult(None, uint64(1), npc_result, False))
result, err, _ = await b.add_block(block_2, PreValidationResult(None, uint64(1), npc_result, False))
assert err == Err.INVALID_BLOCK_COST
# too low
@ -2429,11 +2611,10 @@ class TestBodyValidation:
npc_result = get_name_puzzle_conditions(
block_generator,
min(b.constants.MAX_BLOCK_COST_CLVM * 1000, block.transactions_info.cost),
cost_per_byte=b.constants.COST_PER_BYTE,
mempool_mode=False,
height=softfork_height,
)
result, err, _ = await b.receive_block(block_2, PreValidationResult(None, uint64(1), npc_result, False))
result, err, _ = await b.add_block(block_2, PreValidationResult(None, uint64(1), npc_result, False))
assert err == Err.INVALID_BLOCK_COST
# too high
@ -2454,12 +2635,11 @@ class TestBodyValidation:
npc_result = get_name_puzzle_conditions(
block_generator,
min(b.constants.MAX_BLOCK_COST_CLVM * 1000, block.transactions_info.cost),
cost_per_byte=b.constants.COST_PER_BYTE,
mempool_mode=False,
height=softfork_height,
)
result, err, _ = await b.receive_block(block_2, PreValidationResult(None, uint64(1), npc_result, False))
result, err, _ = await b.add_block(block_2, PreValidationResult(None, uint64(1), npc_result, False))
assert err == Err.INVALID_BLOCK_COST
# when the CLVM program exceeds cost during execution, it will fail with
@ -2487,9 +2667,9 @@ class TestBodyValidation:
# farmer_reward_puzzle_hash=bt.pool_ph,
# pool_reward_puzzle_hash=bt.pool_ph,
# )
# assert (await b.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
# assert (await b.receive_block(blocks[1]))[0] == ReceiveBlockResult.NEW_PEAK
# assert (await b.receive_block(blocks[2]))[0] == ReceiveBlockResult.NEW_PEAK
# assert (await b.add_block(blocks[0]))[0] == AddBlockResult.NEW_PEAK
# assert (await b.add_block(blocks[1]))[0] == AddBlockResult.NEW_PEAK
# assert (await b.add_block(blocks[2]))[0] == AddBlockResult.NEW_PEAK
# wt: WalletTool = bt_2.get_pool_wallet_tool()
@ -2725,8 +2905,8 @@ class TestBodyValidation:
await _validate_and_add_block(b, block)
blocks_reorg = bt.get_consecutive_blocks(2, block_list_input=blocks[:-7], guarantee_transaction_block=True)
await _validate_and_add_block(b, blocks_reorg[-2], expected_result=ReceiveBlockResult.ADDED_AS_ORPHAN)
await _validate_and_add_block(b, blocks_reorg[-1], expected_result=ReceiveBlockResult.ADDED_AS_ORPHAN)
await _validate_and_add_block(b, blocks_reorg[-2], expected_result=AddBlockResult.ADDED_AS_ORPHAN)
await _validate_and_add_block(b, blocks_reorg[-1], expected_result=AddBlockResult.ADDED_AS_ORPHAN)
# Coin does not exist in reorg
blocks_reorg = bt.get_consecutive_blocks(
@ -2740,7 +2920,7 @@ class TestBodyValidation:
blocks_reorg = bt.get_consecutive_blocks(
1, block_list_input=blocks_reorg[:-1], guarantee_transaction_block=True, transaction_data=agg
)
await _validate_and_add_block(b, blocks_reorg[-1], expected_result=ReceiveBlockResult.ADDED_AS_ORPHAN)
await _validate_and_add_block(b, blocks_reorg[-1], expected_result=AddBlockResult.ADDED_AS_ORPHAN)
blocks_reorg = bt.get_consecutive_blocks(
1, block_list_input=blocks_reorg, guarantee_transaction_block=True, transaction_data=tx_2
@ -2756,7 +2936,7 @@ class TestBodyValidation:
)
for block in blocks_reorg[-10:]:
await _validate_and_add_block_multi_result(
b, block, expected_result=[ReceiveBlockResult.ADDED_AS_ORPHAN, ReceiveBlockResult.NEW_PEAK]
b, block, expected_result=[AddBlockResult.ADDED_AS_ORPHAN, AddBlockResult.NEW_PEAK]
)
# ephemeral coin is spent
@ -2896,7 +3076,7 @@ class TestBodyValidation:
new_fsb_sig = bt.get_plot_signature(new_m, last_block.reward_chain_block.proof_of_space.plot_public_key)
last_block = recursive_replace(last_block, "foliage.foliage_transaction_block_signature", new_fsb_sig)
# Bad signature fails during receive_block
# Bad signature fails during add_block
await _validate_and_add_block(b, last_block, expected_error=Err.BAD_AGGREGATE_SIGNATURE)
# Bad signature also fails in prevalidation
@ -2918,9 +3098,9 @@ class TestReorgs:
blocks_reorg_chain = bt.get_consecutive_blocks(7, blocks[:10], seed=b"2")
for reorg_block in blocks_reorg_chain:
if reorg_block.height < 10:
await _validate_and_add_block(b, reorg_block, expected_result=ReceiveBlockResult.ALREADY_HAVE_BLOCK)
await _validate_and_add_block(b, reorg_block, expected_result=AddBlockResult.ALREADY_HAVE_BLOCK)
elif reorg_block.height < 15:
await _validate_and_add_block(b, reorg_block, expected_result=ReceiveBlockResult.ADDED_AS_ORPHAN)
await _validate_and_add_block(b, reorg_block, expected_result=AddBlockResult.ADDED_AS_ORPHAN)
elif reorg_block.height >= 15:
await _validate_and_add_block(b, reorg_block)
assert b.get_peak().height == 16
@ -2953,13 +3133,13 @@ class TestReorgs:
for reorg_block in test_long_reorg_blocks:
if reorg_block.height < num_blocks_chain_2_start:
await _validate_and_add_block(
b, reorg_block, expected_result=ReceiveBlockResult.ALREADY_HAVE_BLOCK, skip_prevalidation=True
b, reorg_block, expected_result=AddBlockResult.ALREADY_HAVE_BLOCK, skip_prevalidation=True
)
elif reorg_block.weight <= chain_1_weight:
await _validate_and_add_block_multi_result(
b,
reorg_block,
[ReceiveBlockResult.ADDED_AS_ORPHAN, ReceiveBlockResult.ALREADY_HAVE_BLOCK],
[AddBlockResult.ADDED_AS_ORPHAN, AddBlockResult.ALREADY_HAVE_BLOCK],
skip_prevalidation=True,
)
elif reorg_block.weight > chain_1_weight:
@ -2993,7 +3173,7 @@ class TestReorgs:
await _validate_and_add_block_multi_result(
b,
reorg_block,
expected_result=[ReceiveBlockResult.ADDED_AS_ORPHAN, ReceiveBlockResult.ALREADY_HAVE_BLOCK],
expected_result=[AddBlockResult.ADDED_AS_ORPHAN, AddBlockResult.ALREADY_HAVE_BLOCK],
)
elif reorg_block.height >= 15:
await _validate_and_add_block(b, reorg_block)
@ -3001,7 +3181,7 @@ class TestReorgs:
# Back to original chain
blocks_reorg_chain_2 = bt.get_consecutive_blocks(3, blocks, seed=b"3")
await _validate_and_add_block(b, blocks_reorg_chain_2[-3], expected_result=ReceiveBlockResult.ADDED_AS_ORPHAN)
await _validate_and_add_block(b, blocks_reorg_chain_2[-3], expected_result=AddBlockResult.ADDED_AS_ORPHAN)
await _validate_and_add_block(b, blocks_reorg_chain_2[-2])
await _validate_and_add_block(b, blocks_reorg_chain_2[-1])
@ -3165,11 +3345,11 @@ async def test_reorg_new_ref(empty_blockchain, bt):
for i, block in enumerate(blocks_reorg_chain):
fork_point_with_peak = None
if i < 10:
expected = ReceiveBlockResult.ALREADY_HAVE_BLOCK
expected = AddBlockResult.ALREADY_HAVE_BLOCK
elif i < 20:
expected = ReceiveBlockResult.ADDED_AS_ORPHAN
expected = AddBlockResult.ADDED_AS_ORPHAN
else:
expected = ReceiveBlockResult.NEW_PEAK
expected = AddBlockResult.NEW_PEAK
fork_point_with_peak = uint32(1)
await _validate_and_add_block(b, block, expected_result=expected, fork_point_with_peak=fork_point_with_peak)
assert b.get_peak().height == 20
@ -3217,11 +3397,11 @@ async def test_reorg_stale_fork_height(empty_blockchain, bt):
blocks = bt.get_consecutive_blocks(4, blocks, previous_generator=[uint32(5)], transaction_data=spend_bundle2)
for block in blocks[:5]:
await _validate_and_add_block(b, block, expected_result=ReceiveBlockResult.NEW_PEAK)
await _validate_and_add_block(b, block, expected_result=AddBlockResult.NEW_PEAK)
# fake the fork_height to make every new block look like a reorg
for block in blocks[5:]:
await _validate_and_add_block(b, block, expected_result=ReceiveBlockResult.NEW_PEAK, fork_point_with_peak=2)
await _validate_and_add_block(b, block, expected_result=AddBlockResult.NEW_PEAK, fork_point_with_peak=2)
assert b.get_peak().height == 13
@ -3266,7 +3446,7 @@ async def test_chain_failed_rollback(empty_blockchain, bt):
)
for block in blocks_reorg_chain[10:-1]:
await _validate_and_add_block(b, block, expected_result=ReceiveBlockResult.ADDED_AS_ORPHAN)
await _validate_and_add_block(b, block, expected_result=AddBlockResult.ADDED_AS_ORPHAN)
# Incorrectly set the height as spent in DB to trigger an error
print(f"{await b.coin_store.get_coin_record(spend_bundle.coin_spends[0].coin.name())}")
@ -3378,12 +3558,12 @@ async def test_reorg_flip_flop(empty_blockchain, bt):
preval: List[PreValidationResult] = await b.pre_validate_blocks_multiprocessing(
[block1], {}, validate_signatures=False
)
result, err, _ = await b.receive_block(block1, preval[0], fork_point_with_peak=fork_height)
result, err, _ = await b.add_block(block1, preval[0], fork_point_with_peak=fork_height)
assert not err
preval: List[PreValidationResult] = await b.pre_validate_blocks_multiprocessing(
[block2], {}, validate_signatures=False
)
result, err, _ = await b.receive_block(block2, preval[0], fork_point_with_peak=fork_height)
result, err, _ = await b.add_block(block2, preval[0], fork_point_with_peak=fork_height)
assert not err
assert b.get_peak().height == 39

View File

@ -305,10 +305,7 @@ class TestBlockchainTransactions:
coin_2 = None
for coin in run_and_get_removals_and_additions(
new_blocks[-1],
test_constants.MAX_BLOCK_COST_CLVM,
cost_per_byte=test_constants.COST_PER_BYTE,
height=softfork_height,
new_blocks[-1], test_constants.MAX_BLOCK_COST_CLVM, height=softfork_height
)[1]:
if coin.puzzle_hash == receiver_1_puzzlehash:
coin_2 = coin
@ -329,10 +326,7 @@ class TestBlockchainTransactions:
coin_3 = None
for coin in run_and_get_removals_and_additions(
new_blocks[-1],
test_constants.MAX_BLOCK_COST_CLVM,
cost_per_byte=test_constants.COST_PER_BYTE,
height=softfork_height,
new_blocks[-1], test_constants.MAX_BLOCK_COST_CLVM, height=softfork_height
)[1]:
if coin.puzzle_hash == receiver_2_puzzlehash:
coin_3 = coin

View File

@ -11,7 +11,8 @@ from chia.types.spend_bundle import SpendBundle
def cost_of_spend_bundle(spend_bundle: SpendBundle) -> int:
program: BlockGenerator = simple_solution_generator(spend_bundle)
# always use the post soft-fork2 semantics
npc_result: NPCResult = get_name_puzzle_conditions(
program, INFINITE_COST, cost_per_byte=DEFAULT_CONSTANTS.COST_PER_BYTE, mempool_mode=True
program, INFINITE_COST, mempool_mode=True, height=DEFAULT_CONSTANTS.SOFT_FORK2_HEIGHT
)
return npc_result.cost

View File

@ -54,19 +54,12 @@ class CoinStore:
self._add_coin_entry(coin, birthday)
return coin
def validate_spend_bundle(
self,
spend_bundle: SpendBundle,
now: CoinTimestamp,
max_cost: int,
cost_per_byte: int,
) -> int:
def validate_spend_bundle(self, spend_bundle: SpendBundle, now: CoinTimestamp, max_cost: int) -> int:
# this should use blockchain consensus code
program = simple_solution_generator(spend_bundle)
result: NPCResult = get_name_puzzle_conditions(
program, max_cost, cost_per_byte=cost_per_byte, mempool_mode=True
)
# always use the post soft-fork2 semantics
result: NPCResult = get_name_puzzle_conditions(program, max_cost, mempool_mode=True, height=uint32(4000000))
if result.error is not None:
raise BadSpendBundleError(f"condition validation failure {Err(result.error)}")
@ -87,7 +80,11 @@ class CoinStore:
err = mempool_check_time_locks(
ephemeral_db,
result.conds,
# TODO: this is technically not right, it's supposed to be the
# previous transaction block's height
uint32(now.height),
# TODO: this is technically not right, it's supposed to be the
# previous transaction block's timestamp
uint64(now.seconds),
)
@ -101,9 +98,8 @@ class CoinStore:
spend_bundle: SpendBundle,
now: CoinTimestamp,
max_cost: int,
cost_per_byte: int,
):
err = self.validate_spend_bundle(spend_bundle, now, max_cost, cost_per_byte)
err = self.validate_spend_bundle(spend_bundle, now, max_cost)
if err != 0:
raise BadSpendBundleError(f"validation failure {err}")
additions = spend_bundle.additions()

View File

@ -41,6 +41,14 @@ class TestPuzzleCompression:
assert coin_spend == CoinSpend.from_bytes(decompress_object_with_puzzles(compressed))
self.compression_factors["standard_puzzle"] = len(bytes(compressed)) / len(bytes(coin_spend))
def test_decompress_limit(self):
buffer = bytearray(10 * 1024 * 1024)
compressed = compress_object_with_puzzles(buffer, LATEST_VERSION)
print(len(compressed))
decompressed = decompress_object_with_puzzles(compressed)
print(len(decompressed))
assert len(decompressed) <= 6 * 1024 * 1024
def test_cat_puzzle(self):
coin_spend = CoinSpend(
COIN,

View File

@ -28,7 +28,6 @@ T1 = CoinTimestamp(1, 10000000)
T2 = CoinTimestamp(5, 10003000)
MAX_BLOCK_COST_CLVM = int(1e18)
COST_PER_BYTE = int(12000)
def secret_exponent_for_index(index: int) -> int:
@ -75,7 +74,7 @@ def do_test_spend(
coin_spend = CoinSpend(coin, puzzle_reveal, solution)
spend_bundle = SpendBundle([coin_spend], G2Element())
coin_db.update_coin_store_for_spend_bundle(spend_bundle, spend_time, MAX_BLOCK_COST_CLVM, COST_PER_BYTE)
coin_db.update_coin_store_for_spend_bundle(spend_bundle, spend_time, MAX_BLOCK_COST_CLVM)
# ensure all outputs are there
for puzzle_hash, amount in payments:

View File

@ -133,7 +133,7 @@ def db_version(request):
return request.param
@pytest.fixture(scope="function", params=[1000000, 3630000, 3830000])
@pytest.fixture(scope="function", params=[1000000, 3630000, 4000000])
def softfork_height(request):
return request.param
@ -457,8 +457,7 @@ async def one_node() -> AsyncIterator[Tuple[List[Service], List[FullNodeSimulato
yield _
# soft-fork 2 is disabled (for now)
@pytest.fixture(scope="function", params=[False])
@pytest.fixture(scope="function", params=[True, False])
def enable_softfork2(request):
return request.param

View File

@ -6,7 +6,7 @@ from typing import List, Optional, Set, Tuple
import pytest
from chia.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward
from chia.consensus.blockchain import Blockchain, ReceiveBlockResult
from chia.consensus.blockchain import AddBlockResult, Blockchain
from chia.consensus.coinbase import create_farmer_coin, create_pool_coin
from chia.full_node.block_store import BlockStore
from chia.full_node.coin_store import CoinStore
@ -96,11 +96,7 @@ class TestCoinStoreWithBlocks:
if block.transactions_generator is not None:
block_gen: BlockGenerator = BlockGenerator(block.transactions_generator, [], [])
npc_result = get_name_puzzle_conditions(
block_gen,
bt.constants.MAX_BLOCK_COST_CLVM,
cost_per_byte=bt.constants.COST_PER_BYTE,
mempool_mode=False,
height=softfork_height,
block_gen, bt.constants.MAX_BLOCK_COST_CLVM, mempool_mode=False, height=softfork_height
)
tx_removals, tx_additions = tx_removals_and_additions(npc_result.conds)
else:
@ -350,15 +346,11 @@ class TestCoinStoreWithBlocks:
for reorg_block in blocks_reorg_chain:
if reorg_block.height < initial_block_count - 10:
await _validate_and_add_block(
b, reorg_block, expected_result=ReceiveBlockResult.ALREADY_HAVE_BLOCK
)
await _validate_and_add_block(b, reorg_block, expected_result=AddBlockResult.ALREADY_HAVE_BLOCK)
elif reorg_block.height < initial_block_count:
await _validate_and_add_block(
b, reorg_block, expected_result=ReceiveBlockResult.ADDED_AS_ORPHAN
)
await _validate_and_add_block(b, reorg_block, expected_result=AddBlockResult.ADDED_AS_ORPHAN)
elif reorg_block.height >= initial_block_count:
await _validate_and_add_block(b, reorg_block, expected_result=ReceiveBlockResult.NEW_PEAK)
await _validate_and_add_block(b, reorg_block, expected_result=AddBlockResult.NEW_PEAK)
if reorg_block.is_transaction_block():
coins = reorg_block.get_included_reward_coins()
records = [await coin_store.get_coin_record(coin.name()) for coin in coins]

View File

@ -7,7 +7,7 @@ from typing import List, Optional
import pytest
import pytest_asyncio
from chia.consensus.blockchain import ReceiveBlockResult
from chia.consensus.blockchain import AddBlockResult
from chia.consensus.find_fork_point import find_fork_point_in_chain
from chia.consensus.multiprocess_validation import PreValidationResult
from chia.consensus.pot_iterations import is_overflow_block
@ -486,7 +486,7 @@ class TestFullNodeStore:
blocks_4[-1].reward_chain_block.signage_point_index
< test_constants.NUM_SPS_SUB_SLOT - test_constants.NUM_SP_INTERVALS_EXTRA
)
await _validate_and_add_block(blockchain, blocks_4[-1], expected_result=ReceiveBlockResult.ADDED_AS_ORPHAN)
await _validate_and_add_block(blockchain, blocks_4[-1], expected_result=AddBlockResult.ADDED_AS_ORPHAN)
sb = blockchain.block_record(blocks_4[-1].header_hash)
store.new_peak(sb, blocks_4[-1], None, None, None, blockchain)

View File

@ -22,7 +22,7 @@ from chia.types.condition_opcodes import ConditionOpcode
from chia.types.full_block import FullBlock
from chia.types.spend_bundle import SpendBundle
from chia.util.errors import Err
from chia.util.ints import uint32
from chia.util.ints import uint32, uint64
from ...blockchain.blockchain_test_utils import _validate_and_add_block
from .ram_db import create_ram_blockchain
@ -63,7 +63,7 @@ async def check_spend_bundle_validity(
) -> Tuple[List[CoinRecord], List[CoinRecord]]:
"""
This test helper create an extra block after the given blocks that contains the given
`SpendBundle`, and then invokes `receive_block` to ensure that it's accepted (if `expected_err=None`)
`SpendBundle`, and then invokes `add_block` to ensure that it's accepted (if `expected_err=None`)
or fails with the correct error code.
"""
if softfork2:
@ -81,6 +81,8 @@ async def check_spend_bundle_validity(
block_list_input=blocks,
guarantee_transaction_block=True,
transaction_data=spend_bundle,
genesis_timestamp=uint64(10000),
time_per_block=10,
)
newest_block = additional_blocks[-1]
@ -126,8 +128,7 @@ co = ConditionOpcode
class TestConditions:
@pytest.mark.asyncio
# soft-fork 2 is disabled (for now)
@pytest.mark.parametrize("softfork2", [False])
@pytest.mark.parametrize("softfork2", [True, False])
@pytest.mark.parametrize(
"opcode,value,expected",
[
@ -135,13 +136,13 @@ class TestConditions:
# the coin being spent was created in the 3rd block (i.e. block 2)
# ensure invalid heights fail and pass correctly, depending on
# which end of the range they exceed
# genesis timestamp is 10000 and each block is 10 seconds
# MY BIRTH HEIGHT
(co.ASSERT_MY_BIRTH_HEIGHT, -1, Err.ASSERT_MY_BIRTH_HEIGHT_FAILED),
(co.ASSERT_MY_BIRTH_HEIGHT, 0x100000000, Err.ASSERT_MY_BIRTH_HEIGHT_FAILED),
(co.ASSERT_MY_BIRTH_HEIGHT, 3, Err.ASSERT_MY_BIRTH_HEIGHT_FAILED),
(co.ASSERT_MY_BIRTH_HEIGHT, 2, None),
# MY BIRTH SECONDS
# genesis timestamp is 10000 and each block is 10 seconds
(co.ASSERT_MY_BIRTH_SECONDS, -1, Err.ASSERT_MY_BIRTH_SECONDS_FAILED),
(co.ASSERT_MY_BIRTH_SECONDS, 0x10000000000000000, Err.ASSERT_MY_BIRTH_SECONDS_FAILED),
(co.ASSERT_MY_BIRTH_SECONDS, 10019, Err.ASSERT_MY_BIRTH_SECONDS_FAILED),
@ -153,23 +154,62 @@ class TestConditions:
(co.ASSERT_HEIGHT_RELATIVE, 1, None),
(co.ASSERT_HEIGHT_RELATIVE, 2, Err.ASSERT_HEIGHT_RELATIVE_FAILED),
(co.ASSERT_HEIGHT_RELATIVE, 0x100000000, Err.ASSERT_HEIGHT_RELATIVE_FAILED),
# BEFORE HEIGHT RELATIVE
(co.ASSERT_BEFORE_HEIGHT_RELATIVE, -1, Err.ASSERT_BEFORE_HEIGHT_RELATIVE_FAILED),
(co.ASSERT_BEFORE_HEIGHT_RELATIVE, 0, Err.ASSERT_BEFORE_HEIGHT_RELATIVE_FAILED),
(co.ASSERT_BEFORE_HEIGHT_RELATIVE, 1, Err.ASSERT_BEFORE_HEIGHT_RELATIVE_FAILED),
(co.ASSERT_BEFORE_HEIGHT_RELATIVE, 2, None),
(co.ASSERT_BEFORE_HEIGHT_RELATIVE, 0x100000000, None),
# HEIGHT ABSOLUTE
(co.ASSERT_HEIGHT_ABSOLUTE, -1, None),
(co.ASSERT_HEIGHT_ABSOLUTE, 0, None),
(co.ASSERT_HEIGHT_ABSOLUTE, 3, None),
(co.ASSERT_HEIGHT_ABSOLUTE, 4, Err.ASSERT_HEIGHT_ABSOLUTE_FAILED),
(co.ASSERT_HEIGHT_ABSOLUTE, 0x100000000, Err.ASSERT_HEIGHT_ABSOLUTE_FAILED),
# BEFORE HEIGHT ABSOLUTE
(co.ASSERT_BEFORE_HEIGHT_ABSOLUTE, -1, Err.ASSERT_BEFORE_HEIGHT_ABSOLUTE_FAILED),
(co.ASSERT_BEFORE_HEIGHT_ABSOLUTE, 0, Err.IMPOSSIBLE_HEIGHT_ABSOLUTE_CONSTRAINTS),
(co.ASSERT_BEFORE_HEIGHT_ABSOLUTE, 3, Err.ASSERT_BEFORE_HEIGHT_ABSOLUTE_FAILED),
(co.ASSERT_BEFORE_HEIGHT_ABSOLUTE, 4, None),
(co.ASSERT_BEFORE_HEIGHT_ABSOLUTE, 0x100000000, None),
# SECONDS RELATIVE
(co.ASSERT_SECONDS_RELATIVE, -1, None),
(co.ASSERT_SECONDS_RELATIVE, 0, None),
(co.ASSERT_SECONDS_RELATIVE, 10, None),
(co.ASSERT_SECONDS_RELATIVE, 11, Err.ASSERT_SECONDS_RELATIVE_FAILED),
(co.ASSERT_SECONDS_RELATIVE, 20, Err.ASSERT_SECONDS_RELATIVE_FAILED),
(co.ASSERT_SECONDS_RELATIVE, 21, Err.ASSERT_SECONDS_RELATIVE_FAILED),
(co.ASSERT_SECONDS_RELATIVE, 30, Err.ASSERT_SECONDS_RELATIVE_FAILED),
(co.ASSERT_SECONDS_RELATIVE, 0x10000000000000000, Err.ASSERT_SECONDS_RELATIVE_FAILED),
# BEFORE SECONDS RELATIVE
(co.ASSERT_BEFORE_SECONDS_RELATIVE, -1, Err.ASSERT_BEFORE_SECONDS_RELATIVE_FAILED),
(co.ASSERT_BEFORE_SECONDS_RELATIVE, 0, Err.ASSERT_BEFORE_SECONDS_RELATIVE_FAILED),
(co.ASSERT_BEFORE_SECONDS_RELATIVE, 10, Err.ASSERT_BEFORE_SECONDS_RELATIVE_FAILED),
(co.ASSERT_BEFORE_SECONDS_RELATIVE, 11, None),
(co.ASSERT_BEFORE_SECONDS_RELATIVE, 20, None),
(co.ASSERT_BEFORE_SECONDS_RELATIVE, 21, None),
(co.ASSERT_BEFORE_SECONDS_RELATIVE, 30, None),
(co.ASSERT_BEFORE_SECONDS_RELATIVE, 0x100000000000000, None),
# SECONDS ABSOLUTE
(co.ASSERT_SECONDS_ABSOLUTE, -1, None),
(co.ASSERT_SECONDS_ABSOLUTE, 0, None),
(co.ASSERT_SECONDS_ABSOLUTE, 10000, None),
(co.ASSERT_SECONDS_ABSOLUTE, 10049, Err.ASSERT_SECONDS_ABSOLUTE_FAILED),
(co.ASSERT_SECONDS_ABSOLUTE, 10030, None),
(co.ASSERT_SECONDS_ABSOLUTE, 10031, Err.ASSERT_SECONDS_ABSOLUTE_FAILED),
(co.ASSERT_SECONDS_ABSOLUTE, 10039, Err.ASSERT_SECONDS_ABSOLUTE_FAILED),
(co.ASSERT_SECONDS_ABSOLUTE, 10040, Err.ASSERT_SECONDS_ABSOLUTE_FAILED),
(co.ASSERT_SECONDS_ABSOLUTE, 10041, Err.ASSERT_SECONDS_ABSOLUTE_FAILED),
(co.ASSERT_SECONDS_ABSOLUTE, 0x10000000000000000, Err.ASSERT_SECONDS_ABSOLUTE_FAILED),
# BEFORE SECONDS ABSOLUTE
(co.ASSERT_BEFORE_SECONDS_ABSOLUTE, -1, Err.ASSERT_BEFORE_SECONDS_ABSOLUTE_FAILED),
(co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 0, Err.IMPOSSIBLE_SECONDS_ABSOLUTE_CONSTRAINTS),
(co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10000, Err.ASSERT_BEFORE_SECONDS_ABSOLUTE_FAILED),
(co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10030, Err.ASSERT_BEFORE_SECONDS_ABSOLUTE_FAILED),
(co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10031, None),
(co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10039, None),
(co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10040, None),
(co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10041, None),
(co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 0x100000000, None),
],
)
async def test_condition(self, opcode, value, expected, bt, softfork2):
@ -180,9 +220,23 @@ class TestConditions:
if not softfork2 and opcode in [
co.ASSERT_MY_BIRTH_HEIGHT,
co.ASSERT_MY_BIRTH_SECONDS,
co.ASSERT_BEFORE_SECONDS_RELATIVE,
co.ASSERT_BEFORE_SECONDS_ABSOLUTE,
co.ASSERT_BEFORE_HEIGHT_RELATIVE,
co.ASSERT_BEFORE_HEIGHT_ABSOLUTE,
]:
expected = None
if not softfork2:
# before soft-fork 2, the timestamp we compared against was the
# current block's timestamp as opposed to the previous tx-block's
# timestamp. These conditions used to be valid, before the soft-fork
if opcode == ConditionOpcode.ASSERT_SECONDS_RELATIVE and value > 10 and value <= 20:
expected = None
if opcode == ConditionOpcode.ASSERT_SECONDS_ABSOLUTE and value > 10030 and value <= 10040:
expected = None
await check_conditions(bt, conditions, expected_err=expected, softfork2=softfork2)
@pytest.mark.asyncio

View File

@ -1200,6 +1200,7 @@ class TestFullNodeProtocol:
(4, Err.INVALID_PLOT_SIGNATURE),
(5, Err.INVALID_POSPACE),
(6, Err.INVALID_POSPACE),
(7, Err.TOO_MANY_GENERATOR_REFS),
],
)
async def test_unfinished_block_with_replaced_generator(self, wallet_nodes, self_hostname, committment, expected):
@ -1315,6 +1316,10 @@ class TestFullNodeProtocol:
else:
reward_chain_block = block.reward_chain_block.get_unfinished()
generator_refs: List[uint32] = []
if committment > 6:
generator_refs = [uint32(n) for n in range(600)]
unf = UnfinishedBlock(
block.finished_sub_slots[:] if not overflow else block.finished_sub_slots[:-1],
reward_chain_block,
@ -1324,7 +1329,7 @@ class TestFullNodeProtocol:
transaction_block,
transactions_info,
replaced_generator,
[],
generator_refs,
)
_, header_error = await full_node_1.full_node.blockchain.validate_unfinished_block_header(unf)

View File

@ -0,0 +1,5 @@
from __future__ import annotations
parallel = True
job_timeout = 50
checkout_blocks_and_plots = False

View File

@ -372,23 +372,25 @@ class TestMempoolManager:
@pytest.mark.parametrize(
"opcode,lock_value,expected",
[
# the mempool rules don't allow relative height- or time conditions on
# ephemeral spends
(co.ASSERT_MY_BIRTH_HEIGHT, -1, mis.FAILED),
(co.ASSERT_MY_BIRTH_HEIGHT, 0x100000000, mis.FAILED),
(co.ASSERT_MY_BIRTH_HEIGHT, 5, mis.FAILED),
(co.ASSERT_MY_BIRTH_HEIGHT, 6, mis.SUCCESS),
(co.ASSERT_MY_BIRTH_HEIGHT, 6, mis.FAILED),
(co.ASSERT_MY_BIRTH_SECONDS, -1, mis.FAILED),
(co.ASSERT_MY_BIRTH_SECONDS, 0x10000000000000000, mis.FAILED),
(co.ASSERT_MY_BIRTH_SECONDS, 10049, mis.FAILED),
(co.ASSERT_MY_BIRTH_SECONDS, 10050, mis.SUCCESS),
(co.ASSERT_MY_BIRTH_SECONDS, 10050, mis.FAILED),
(co.ASSERT_MY_BIRTH_SECONDS, 10051, mis.FAILED),
(co.ASSERT_SECONDS_RELATIVE, -2, mis.SUCCESS),
(co.ASSERT_SECONDS_RELATIVE, -1, mis.SUCCESS),
(co.ASSERT_SECONDS_RELATIVE, 0, mis.SUCCESS),
(co.ASSERT_SECONDS_RELATIVE, -2, mis.FAILED),
(co.ASSERT_SECONDS_RELATIVE, -1, mis.FAILED),
(co.ASSERT_SECONDS_RELATIVE, 0, mis.FAILED),
(co.ASSERT_SECONDS_RELATIVE, 1, mis.FAILED),
(co.ASSERT_HEIGHT_RELATIVE, -2, mis.SUCCESS),
(co.ASSERT_HEIGHT_RELATIVE, -1, mis.SUCCESS),
(co.ASSERT_HEIGHT_RELATIVE, 0, mis.PENDING),
(co.ASSERT_HEIGHT_RELATIVE, 1, mis.PENDING),
(co.ASSERT_HEIGHT_RELATIVE, -2, mis.FAILED),
(co.ASSERT_HEIGHT_RELATIVE, -1, mis.FAILED),
(co.ASSERT_HEIGHT_RELATIVE, 0, mis.FAILED),
(co.ASSERT_HEIGHT_RELATIVE, 1, mis.FAILED),
# the absolute height and seconds tests require fresh full nodes to
# run the test on. The fixture (one_node_one_block) creates a block,
# then condition_tester2 creates another 3 blocks
@ -1968,9 +1970,7 @@ def generator_condition_tester(
program = SerializedProgram.from_bytes(binutils.assemble(prg).as_bin())
generator = BlockGenerator(program, [], [])
print(f"len: {len(bytes(program))}")
npc_result: NPCResult = get_name_puzzle_conditions(
generator, max_cost, cost_per_byte=COST_PER_BYTE, mempool_mode=mempool_mode, height=height
)
npc_result: NPCResult = get_name_puzzle_conditions(generator, max_cost, mempool_mode=mempool_mode, height=height)
return npc_result
@ -2149,7 +2149,7 @@ class TestGeneratorConditions:
)
generator = BlockGenerator(program, [], [])
npc_result: NPCResult = get_name_puzzle_conditions(
generator, MAX_BLOCK_COST_CLVM, cost_per_byte=COST_PER_BYTE, mempool_mode=False, height=softfork_height
generator, MAX_BLOCK_COST_CLVM, mempool_mode=False, height=softfork_height
)
assert npc_result.error is None
assert len(npc_result.conds.spends) == 2
@ -2192,18 +2192,10 @@ class TestGeneratorConditions:
coins = npc_result.conds.spends[0].create_coin
assert coins == [(puzzle_hash_1.encode("ascii"), 5, hint.encode("ascii"))]
@pytest.mark.parametrize(
"mempool,height",
[
(True, None),
(False, 2300000),
(False, 3630000),
(False, 3830000),
],
)
def test_unknown_condition(self, mempool: bool, height: uint32):
@pytest.mark.parametrize("mempool", [True, False])
def test_unknown_condition(self, mempool: bool, softfork_height: uint32):
for c in ['(2 100 "foo" "bar")', "(100)", "(4 1) (2 2) (3 3)", '("foobar")']:
npc_result = generator_condition_tester(c, mempool_mode=mempool, height=height)
npc_result = generator_condition_tester(c, mempool_mode=mempool, height=softfork_height)
print(npc_result)
if mempool:
assert npc_result.error == Err.INVALID_CONDITION.value

View File

@ -11,7 +11,15 @@ from chia.consensus.constants import ConsensusConstants
from chia.consensus.cost_calculator import NPCResult
from chia.consensus.default_constants import DEFAULT_CONSTANTS
from chia.full_node.mempool_check_conditions import mempool_check_time_locks
from chia.full_node.mempool_manager import MempoolManager, can_replace, compute_assert_height
from chia.full_node.mempool_manager import (
MEMPOOL_MIN_FEE_INCREASE,
MempoolManager,
TimelockConditions,
can_replace,
compute_assert_height,
optional_max,
optional_min,
)
from chia.types.blockchain_format.coin import Coin
from chia.types.blockchain_format.program import Program
from chia.types.blockchain_format.serialized_program import SerializedProgram
@ -125,6 +133,10 @@ def make_test_conds(
height_absolute: int = 0,
seconds_relative: Optional[int] = None,
seconds_absolute: int = 0,
before_height_relative: Optional[int] = None,
before_height_absolute: Optional[int] = None,
before_seconds_relative: Optional[int] = None,
before_seconds_absolute: Optional[int] = None,
cost: int = 0,
) -> SpendBundleConditions:
return SpendBundleConditions(
@ -134,8 +146,8 @@ def make_test_conds(
IDENTITY_PUZZLE_HASH,
None if height_relative is None else uint32(height_relative),
None if seconds_relative is None else uint64(seconds_relative),
None,
None,
None if before_height_relative is None else uint32(before_height_relative),
None if before_seconds_relative is None else uint64(before_seconds_relative),
None if birth_height is None else uint32(birth_height),
None if birth_seconds is None else uint64(birth_seconds),
[],
@ -146,8 +158,8 @@ def make_test_conds(
0,
uint32(height_absolute),
uint64(seconds_absolute),
None,
None,
None if before_height_absolute is None else uint32(before_height_absolute),
None if before_seconds_absolute is None else uint64(before_seconds_absolute),
[],
cost,
0,
@ -189,6 +201,18 @@ class TestCheckTimeLocks:
(make_test_conds(birth_seconds=9999), Err.ASSERT_MY_BIRTH_SECONDS_FAILED),
(make_test_conds(birth_seconds=10000), None),
(make_test_conds(birth_seconds=10001), Err.ASSERT_MY_BIRTH_SECONDS_FAILED),
# the coin is 5 blocks old in this test
(make_test_conds(before_height_relative=5), Err.ASSERT_BEFORE_HEIGHT_RELATIVE_FAILED),
(make_test_conds(before_height_relative=6), None),
# The block height is 15
(make_test_conds(before_height_absolute=15), Err.ASSERT_BEFORE_HEIGHT_ABSOLUTE_FAILED),
(make_test_conds(before_height_absolute=16), None),
# the coin is 150 seconds old in this test
(make_test_conds(before_seconds_relative=150), Err.ASSERT_BEFORE_SECONDS_RELATIVE_FAILED),
(make_test_conds(before_seconds_relative=151), None),
# The block timestamp is 10150
(make_test_conds(before_seconds_absolute=10150), Err.ASSERT_BEFORE_SECONDS_ABSOLUTE_FAILED),
(make_test_conds(before_seconds_absolute=10151), None),
],
)
def test_conditions(
@ -197,33 +221,78 @@ class TestCheckTimeLocks:
expected: Optional[Err],
) -> None:
assert (
mempool_check_time_locks(self.REMOVALS, conds, self.PREV_BLOCK_HEIGHT, self.PREV_BLOCK_TIMESTAMP)
mempool_check_time_locks(
self.REMOVALS,
conds,
self.PREV_BLOCK_HEIGHT,
self.PREV_BLOCK_TIMESTAMP,
)
== expected
)
def expect(*, height: int = 0) -> uint32:
return uint32(height)
def expect(
*, height: int = 0, before_height: Optional[int] = None, before_seconds: Optional[int] = None
) -> TimelockConditions:
ret = TimelockConditions(uint32(height))
if before_height is not None:
ret.assert_before_height = uint32(before_height)
if before_seconds is not None:
ret.assert_before_seconds = uint64(before_seconds)
return ret
@pytest.mark.parametrize(
"conds,expected",
[
# ASSERT_HEIGHT_*
# coin birth height is 12
(make_test_conds(), expect()),
(make_test_conds(height_absolute=42), expect(height=42)),
# 1 is a relative height, but that only amounts to 13, so the absolute
# height is more restrictive
(make_test_conds(height_relative=1), expect(height=13)),
# 100 is a relative height, and sinec the coin was confirmed at height 12,
# 100 is a relative height, and since the coin was confirmed at height 12,
# that's 112
(make_test_conds(height_absolute=42, height_relative=100), expect(height=112)),
# Same thing but without the absolute height
(make_test_conds(height_relative=100), expect(height=112)),
(make_test_conds(height_relative=0), expect(height=12)),
# 42 is more restrictive than 13
(make_test_conds(height_absolute=42, height_relative=1), expect(height=42)),
# ASSERT_BEFORE_HEIGHT_*
(make_test_conds(before_height_absolute=100), expect(before_height=100)),
# coin is created at 12 + 1 relative height = 13
(make_test_conds(before_height_relative=1), expect(before_height=13)),
# coin is created at 12 + 0 relative height = 12
(make_test_conds(before_height_relative=0), expect(before_height=12)),
# 13 is more restrictive than 42
(make_test_conds(before_height_absolute=42, before_height_relative=1), expect(before_height=13)),
# 100 is a relative height, and since the coin was confirmed at height 12,
# that's 112
(make_test_conds(before_height_absolute=200, before_height_relative=100), expect(before_height=112)),
# Same thing but without the absolute height
(make_test_conds(before_height_relative=100), expect(before_height=112)),
# ASSERT_BEFORE_SECONDS_*
# coin timestamp is 10000
# single absolute assert before seconds
(make_test_conds(before_seconds_absolute=20000), expect(before_seconds=20000)),
# coin is created at 10000 + 100 relative seconds = 10100
(make_test_conds(before_seconds_relative=100), expect(before_seconds=10100)),
# coin is created at 10000 + 0 relative seconds = 10000
(make_test_conds(before_seconds_relative=0), expect(before_seconds=10000)),
# 10100 is more restrictive than 20000
(make_test_conds(before_seconds_absolute=20000, before_seconds_relative=100), expect(before_seconds=10100)),
# 20000 is a relative seconds, and since the coin was confirmed at seconds
# 10000 that's 300000
(make_test_conds(before_seconds_absolute=20000, before_seconds_relative=20000), expect(before_seconds=20000)),
# Same thing but without the absolute seconds
(make_test_conds(before_seconds_relative=20000), expect(before_seconds=30000)),
],
)
def test_compute_assert_height(conds: SpendBundleConditions, expected: uint32) -> None:
def test_compute_assert_height(conds: SpendBundleConditions, expected: TimelockConditions) -> None:
coin_id = TEST_COIN.name()
confirmed_height = uint32(12)
coin_records = {coin_id: CoinRecord(TEST_COIN, confirmed_height, uint32(0), False, uint64(10000))}
@ -425,40 +494,58 @@ mis = MempoolInclusionStatus
@pytest.mark.asyncio
# soft-fork 2 is disabled (for now)
@pytest.mark.parametrize("softfork2", [False])
@pytest.mark.parametrize("softfork2", [False, True])
@pytest.mark.parametrize(
"opcode,lock_value,expected_status,expected_error",
[
# the mempool rules don't allow relative height- or time conditions on
# ephemeral spends
# SECONDS RELATIVE
(co.ASSERT_SECONDS_RELATIVE, -2, mis.SUCCESS, None),
(co.ASSERT_SECONDS_RELATIVE, -1, mis.SUCCESS, None),
# The rules allow spending an ephemeral coin with an ASSERT_SECONDS_RELATIVE 0 condition
(co.ASSERT_SECONDS_RELATIVE, 0, mis.SUCCESS, None),
(co.ASSERT_SECONDS_RELATIVE, 1, mis.FAILED, Err.ASSERT_SECONDS_RELATIVE_FAILED),
(co.ASSERT_SECONDS_RELATIVE, 9, mis.FAILED, Err.ASSERT_SECONDS_RELATIVE_FAILED),
(co.ASSERT_SECONDS_RELATIVE, 10, mis.FAILED, Err.ASSERT_SECONDS_RELATIVE_FAILED),
(co.ASSERT_SECONDS_RELATIVE, -2, mis.FAILED, Err.EPHEMERAL_RELATIVE_CONDITION),
(co.ASSERT_SECONDS_RELATIVE, -1, mis.FAILED, Err.EPHEMERAL_RELATIVE_CONDITION),
(co.ASSERT_SECONDS_RELATIVE, 0, mis.FAILED, Err.EPHEMERAL_RELATIVE_CONDITION),
(co.ASSERT_SECONDS_RELATIVE, 1, mis.FAILED, Err.EPHEMERAL_RELATIVE_CONDITION),
(co.ASSERT_SECONDS_RELATIVE, 9, mis.FAILED, Err.EPHEMERAL_RELATIVE_CONDITION),
(co.ASSERT_SECONDS_RELATIVE, 10, mis.FAILED, Err.EPHEMERAL_RELATIVE_CONDITION),
# HEIGHT RELATIVE
(co.ASSERT_HEIGHT_RELATIVE, -2, mis.SUCCESS, None),
(co.ASSERT_HEIGHT_RELATIVE, -1, mis.SUCCESS, None),
(co.ASSERT_HEIGHT_RELATIVE, 0, mis.PENDING, Err.ASSERT_HEIGHT_RELATIVE_FAILED),
(co.ASSERT_HEIGHT_RELATIVE, 1, mis.PENDING, Err.ASSERT_HEIGHT_RELATIVE_FAILED),
(co.ASSERT_HEIGHT_RELATIVE, 5, mis.PENDING, Err.ASSERT_HEIGHT_RELATIVE_FAILED),
(co.ASSERT_HEIGHT_RELATIVE, 6, mis.PENDING, Err.ASSERT_HEIGHT_RELATIVE_FAILED),
(co.ASSERT_HEIGHT_RELATIVE, 7, mis.PENDING, Err.ASSERT_HEIGHT_RELATIVE_FAILED),
(co.ASSERT_HEIGHT_RELATIVE, 10, mis.PENDING, Err.ASSERT_HEIGHT_RELATIVE_FAILED),
(co.ASSERT_HEIGHT_RELATIVE, 11, mis.PENDING, Err.ASSERT_HEIGHT_RELATIVE_FAILED),
(co.ASSERT_HEIGHT_RELATIVE, -2, mis.FAILED, Err.EPHEMERAL_RELATIVE_CONDITION),
(co.ASSERT_HEIGHT_RELATIVE, -1, mis.FAILED, Err.EPHEMERAL_RELATIVE_CONDITION),
(co.ASSERT_HEIGHT_RELATIVE, 0, mis.FAILED, Err.EPHEMERAL_RELATIVE_CONDITION),
(co.ASSERT_HEIGHT_RELATIVE, 1, mis.FAILED, Err.EPHEMERAL_RELATIVE_CONDITION),
(co.ASSERT_HEIGHT_RELATIVE, 5, mis.FAILED, Err.EPHEMERAL_RELATIVE_CONDITION),
(co.ASSERT_HEIGHT_RELATIVE, 6, mis.FAILED, Err.EPHEMERAL_RELATIVE_CONDITION),
(co.ASSERT_HEIGHT_RELATIVE, 7, mis.FAILED, Err.EPHEMERAL_RELATIVE_CONDITION),
(co.ASSERT_HEIGHT_RELATIVE, 10, mis.FAILED, Err.EPHEMERAL_RELATIVE_CONDITION),
(co.ASSERT_HEIGHT_RELATIVE, 11, mis.FAILED, Err.EPHEMERAL_RELATIVE_CONDITION),
# BEFORE HEIGHT RELATIVE
(co.ASSERT_BEFORE_HEIGHT_RELATIVE, -2, mis.FAILED, Err.ASSERT_BEFORE_HEIGHT_RELATIVE_FAILED),
(co.ASSERT_BEFORE_HEIGHT_RELATIVE, -1, mis.FAILED, Err.ASSERT_BEFORE_HEIGHT_RELATIVE_FAILED),
(co.ASSERT_BEFORE_HEIGHT_RELATIVE, 0, mis.FAILED, Err.EPHEMERAL_RELATIVE_CONDITION),
(co.ASSERT_BEFORE_HEIGHT_RELATIVE, 1, mis.FAILED, Err.EPHEMERAL_RELATIVE_CONDITION),
(co.ASSERT_BEFORE_HEIGHT_RELATIVE, 5, mis.FAILED, Err.EPHEMERAL_RELATIVE_CONDITION),
(co.ASSERT_BEFORE_HEIGHT_RELATIVE, 6, mis.FAILED, Err.EPHEMERAL_RELATIVE_CONDITION),
(co.ASSERT_BEFORE_HEIGHT_RELATIVE, 7, mis.FAILED, Err.EPHEMERAL_RELATIVE_CONDITION),
# HEIGHT ABSOLUTE
(co.ASSERT_HEIGHT_ABSOLUTE, 4, mis.SUCCESS, None),
(co.ASSERT_HEIGHT_ABSOLUTE, 5, mis.SUCCESS, None),
(co.ASSERT_HEIGHT_ABSOLUTE, 6, mis.PENDING, Err.ASSERT_HEIGHT_ABSOLUTE_FAILED),
(co.ASSERT_HEIGHT_ABSOLUTE, 7, mis.PENDING, Err.ASSERT_HEIGHT_ABSOLUTE_FAILED),
# BEFORE HEIGHT ABSOLUTE
(co.ASSERT_BEFORE_HEIGHT_ABSOLUTE, 4, mis.FAILED, Err.ASSERT_BEFORE_HEIGHT_ABSOLUTE_FAILED),
(co.ASSERT_BEFORE_HEIGHT_ABSOLUTE, 5, mis.FAILED, Err.ASSERT_BEFORE_HEIGHT_ABSOLUTE_FAILED),
(co.ASSERT_BEFORE_HEIGHT_ABSOLUTE, 6, mis.SUCCESS, None),
(co.ASSERT_BEFORE_HEIGHT_ABSOLUTE, 7, mis.SUCCESS, None),
# SECONDS ABSOLUTE
# Current block timestamp is 10050
(co.ASSERT_SECONDS_ABSOLUTE, 10049, mis.SUCCESS, None),
(co.ASSERT_SECONDS_ABSOLUTE, 10050, mis.SUCCESS, None),
(co.ASSERT_SECONDS_ABSOLUTE, 10051, mis.FAILED, Err.ASSERT_SECONDS_ABSOLUTE_FAILED),
(co.ASSERT_SECONDS_ABSOLUTE, 10052, mis.FAILED, Err.ASSERT_SECONDS_ABSOLUTE_FAILED),
# BEFORE SECONDS ABSOLUTE
(co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10049, mis.FAILED, Err.ASSERT_BEFORE_SECONDS_ABSOLUTE_FAILED),
(co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10050, mis.FAILED, Err.ASSERT_BEFORE_SECONDS_ABSOLUTE_FAILED),
(co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10051, mis.SUCCESS, None),
(co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10052, mis.SUCCESS, None),
],
)
async def test_ephemeral_timelock(
@ -487,6 +574,7 @@ async def test_ephemeral_timelock(
co.ASSERT_BEFORE_SECONDS_RELATIVE,
]:
expected_error = Err.INVALID_CONDITION
expected_status = MempoolInclusionStatus.FAILED
conditions = [[ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, 1]]
created_coin = Coin(TEST_COIN_ID, IDENTITY_PUZZLE_HASH, 1)
@ -496,17 +584,52 @@ async def test_ephemeral_timelock(
sb = SpendBundle.aggregate([sb1, sb2])
# We shouldn't have a record of this ephemeral coin
assert await get_coin_record_for_test_coins(created_coin.name()) is None
_, status, error = await add_spendbundle(mempool_manager, sb, sb.name())
assert (status, error) == (expected_status, expected_error)
try:
_, status, error = await add_spendbundle(mempool_manager, sb, sb.name())
assert (status, error) == (expected_status, expected_error)
except ValidationError as e:
assert expected_status == mis.FAILED
assert expected_error == e.code
def mk_item(coins: List[Coin], *, cost: int = 1, fee: int = 0) -> MempoolItem:
def test_optional_min() -> None:
assert optional_min(uint32(100), None) == uint32(100)
assert optional_min(None, uint32(100)) == uint32(100)
assert optional_min(None, None) is None
assert optional_min(uint32(123), uint32(234)) == uint32(123)
def test_optional_max() -> None:
assert optional_max(uint32(100), None) == uint32(100)
assert optional_max(None, uint32(100)) == uint32(100)
assert optional_max(None, None) is None
assert optional_max(uint32(123), uint32(234)) == uint32(234)
def mk_item(
coins: List[Coin],
*,
cost: int = 1,
fee: int = 0,
assert_height: Optional[int] = None,
assert_before_height: Optional[int] = None,
assert_before_seconds: Optional[int] = None,
) -> MempoolItem:
# we don't actually care about the puzzle and solutions for the purpose of
# can_replace()
spends = [CoinSpend(c, SerializedProgram(), SerializedProgram()) for c in coins]
spend_bundle = SpendBundle(spends, G2Element())
npc_results = NPCResult(None, make_test_conds(cost=cost), uint64(cost))
return MempoolItem(spend_bundle, uint64(fee), npc_results, spend_bundle.name(), uint32(0))
return MempoolItem(
spend_bundle,
uint64(fee),
npc_results,
spend_bundle.name(),
uint32(0),
None if assert_height is None else uint32(assert_height),
None if assert_before_height is None else uint32(assert_before_height),
None if assert_before_seconds is None else uint64(assert_before_seconds),
)
def make_test_coins() -> List[Coin]:
@ -560,6 +683,95 @@ coins = make_test_coins()
mk_item(coins[0:2], fee=10000200, cost=10000200),
False,
),
# TIMELOCK RULE
# the new item must not have different time lock than the existing item(s)
# the assert height time lock condition was introduced in the new item
([mk_item(coins[0:1])], mk_item(coins[0:1], fee=10000000, assert_height=1000), False),
# the assert before height time lock condition was introduced in the new item
([mk_item(coins[0:1])], mk_item(coins[0:1], fee=10000000, assert_before_height=1000), False),
# the assert before seconds time lock condition was introduced in the new item
([mk_item(coins[0:1])], mk_item(coins[0:1], fee=10000000, assert_before_seconds=1000), False),
# if we don't alter any time locks, we are allowed to replace
([mk_item(coins[0:1])], mk_item(coins[0:1], fee=10000000), True),
# ASSERT_HEIGHT
# the assert height time lock condition was removed in the new item
([mk_item(coins[0:1], assert_height=1000)], mk_item(coins[0:1], fee=10000000), False),
# different assert height constraint
([mk_item(coins[0:1], assert_height=1000)], mk_item(coins[0:1], fee=10000000, assert_height=100), False),
([mk_item(coins[0:1], assert_height=1000)], mk_item(coins[0:1], fee=10000000, assert_height=2000), False),
# the same assert height is OK
([mk_item(coins[0:1], assert_height=1000)], mk_item(coins[0:1], fee=10000000, assert_height=1000), True),
# The new spend just have to match the most restrictive condition
(
[mk_item(coins[0:1], assert_height=200), mk_item(coins[1:2], assert_height=400)],
mk_item(coins[0:2], fee=10000000, assert_height=400),
True,
),
# ASSERT_BEFORE_HEIGHT
# the assert before height time lock condition was removed in the new item
([mk_item(coins[0:1], assert_before_height=1000)], mk_item(coins[0:1], fee=10000000), False),
# different assert before height constraint
(
[mk_item(coins[0:1], assert_before_height=1000)],
mk_item(coins[0:1], fee=10000000, assert_before_height=100),
False,
),
(
[mk_item(coins[0:1], assert_before_height=1000)],
mk_item(coins[0:1], fee=10000000, assert_before_height=2000),
False,
),
# The new spend just have to match the most restrictive condition
(
[mk_item(coins[0:1], assert_before_height=200), mk_item(coins[1:2], assert_before_height=400)],
mk_item(coins[0:2], fee=10000000, assert_before_height=200),
True,
),
# ASSERT_BEFORE_SECONDS
# the assert before height time lock condition was removed in the new item
([mk_item(coins[0:1], assert_before_seconds=1000)], mk_item(coins[0:1], fee=10000000), False),
# different assert before seconds constraint
(
[mk_item(coins[0:1], assert_before_seconds=1000)],
mk_item(coins[0:1], fee=10000000, assert_before_seconds=100),
False,
),
(
[mk_item(coins[0:1], assert_before_seconds=1000)],
mk_item(coins[0:1], fee=10000000, assert_before_seconds=2000),
False,
),
# the assert before height time lock condition was introduced in the new item
(
[mk_item(coins[0:1], assert_before_seconds=1000)],
mk_item(coins[0:1], fee=10000000, assert_before_seconds=1000),
True,
),
# The new spend just have to match the most restrictive condition
(
[mk_item(coins[0:1], assert_before_seconds=200), mk_item(coins[1:2], assert_before_seconds=400)],
mk_item(coins[0:2], fee=10000000, assert_before_seconds=200),
True,
),
# MIXED CONDITIONS
# we can't replace an assert_before_seconds with assert_before_height
(
[mk_item(coins[0:1], assert_before_seconds=1000)],
mk_item(coins[0:1], fee=10000000, assert_before_height=2000),
False,
),
# we added another condition
(
[mk_item(coins[0:1], assert_before_seconds=1000)],
mk_item(coins[0:1], fee=10000000, assert_before_seconds=1000, assert_height=200),
False,
),
# we removed assert before height
(
[mk_item(coins[0:1], assert_height=200, assert_before_height=1000)],
mk_item(coins[0:1], fee=10000000, assert_height=200),
False,
),
],
)
def test_can_replace(existing_items: List[MempoolItem], new_item: MempoolItem, expected: bool) -> None:
@ -713,3 +925,209 @@ async def test_create_bundle_from_mempool_on_max_cost() -> None:
# The first spend bundle hits the maximum block clvm cost and gets skipped
assert additions == [Coin(coins[1].name(), IDENTITY_PUZZLE_HASH, coins[1].amount - 2)]
assert removals == [coins[1]]
@pytest.mark.parametrize(
"opcode,arg,expect_eviction",
[
# current height: 10 current_time: 10000
# we step the chain forward 1 block and 19 seconds
(co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10001, True),
(co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10019, True),
(co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10020, False),
(co.ASSERT_BEFORE_HEIGHT_ABSOLUTE, 11, True),
(co.ASSERT_BEFORE_HEIGHT_ABSOLUTE, 12, False),
# the coin was created at height: 5 timestamp: 9900
(co.ASSERT_BEFORE_HEIGHT_RELATIVE, 6, True),
(co.ASSERT_BEFORE_HEIGHT_RELATIVE, 7, False),
(co.ASSERT_BEFORE_SECONDS_RELATIVE, 119, True),
(co.ASSERT_BEFORE_SECONDS_RELATIVE, 120, False),
],
)
@pytest.mark.asyncio
async def test_assert_before_expiration(opcode: ConditionOpcode, arg: int, expect_eviction: bool) -> None:
async def get_coin_record(coin_id: bytes32) -> Optional[CoinRecord]:
return {TEST_COIN.name(): CoinRecord(TEST_COIN, uint32(5), uint32(0), False, uint64(9900))}.get(coin_id)
mempool_manager = await instantiate_mempool_manager(
get_coin_record,
block_height=uint32(10),
block_timestamp=uint64(10000),
constants=DEFAULT_CONSTANTS.replace(SOFT_FORK2_HEIGHT=0),
)
bundle = spend_bundle_from_conditions(
[
[ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, 1],
[opcode, arg],
],
coin=TEST_COIN,
)
bundle_name = bundle.name()
assert (await add_spendbundle(mempool_manager, bundle, bundle_name))[1] == mis.SUCCESS
# make sure the spend was added correctly
assert mempool_manager.get_spendbundle(bundle_name) == bundle
block_record = create_test_block_record(height=uint32(11), timestamp=uint64(10019))
await mempool_manager.new_peak(block_record, None)
still_in_pool = mempool_manager.get_spendbundle(bundle_name) == bundle
assert still_in_pool != expect_eviction
def make_test_spendbundle(coin: Coin, *, fee: int = 0) -> SpendBundle:
    """Build a simple test spend bundle spending *coin*, leaving *fee* mojos as fee.

    The bundle recreates the coin's value (minus the fee) at the identity
    puzzle hash and includes an AGG_SIG_UNSAFE condition.
    """
    change_amount = uint64(coin.amount - fee)
    return spend_bundle_from_conditions(
        [
            [ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, change_amount],
            [ConditionOpcode.AGG_SIG_UNSAFE, G1Element(), IDENTITY_PUZZLE_HASH],
        ],
        coin,
    )
async def send_spendbundle(
    mempool_manager: MempoolManager,
    sb: SpendBundle,
    expected_result: Tuple[MempoolInclusionStatus, Optional[Err]] = (MempoolInclusionStatus.SUCCESS, None),
) -> None:
    """Submit *sb* to the mempool and assert the (inclusion status, error) outcome."""
    outcome = await add_spendbundle(mempool_manager, sb, sb.name())
    # outcome[1] is the inclusion status, outcome[2] the (optional) error.
    assert (outcome[1], outcome[2]) == expected_result
async def make_and_send_spendbundle(
    mempool_manager: MempoolManager,
    coin: Coin,
    *,
    fee: int = 0,
    expected_result: Tuple[MempoolInclusionStatus, Optional[Err]] = (MempoolInclusionStatus.SUCCESS, None),
) -> SpendBundle:
    """Build a test spend bundle for *coin*, submit it, assert the outcome, and return it."""
    bundle = make_test_spendbundle(coin, fee=fee)
    await send_spendbundle(mempool_manager, bundle, expected_result)
    return bundle
def assert_sb_in_pool(mempool_manager: MempoolManager, sb: SpendBundle) -> None:
    """Assert that *sb* is currently stored in the mempool under its own name."""
    stored = mempool_manager.get_spendbundle(sb.name())
    assert stored == sb
def assert_sb_not_in_pool(mempool_manager: MempoolManager, sb: SpendBundle) -> None:
    """Assert that the mempool holds no entry under *sb*'s name."""
    stored = mempool_manager.get_spendbundle(sb.name())
    assert stored is None
@pytest.mark.asyncio
async def test_insufficient_fee_increase() -> None:
    """A conflicting bundle whose fee bump is below the minimum must not replace the original."""
    mempool_manager, coins = await setup_mempool_with_coins(coin_amounts=list(range(1000000000, 1000000010)))
    original = await make_and_send_spendbundle(mempool_manager, coins[0])
    replacement = await make_and_send_spendbundle(
        mempool_manager, coins[0], fee=1, expected_result=(MempoolInclusionStatus.PENDING, Err.MEMPOOL_CONFLICT)
    )
    # The replacement attempt fails, so the original item stays in the pool.
    assert_sb_in_pool(mempool_manager, original)
    assert_sb_not_in_pool(mempool_manager, replacement)
@pytest.mark.asyncio
async def test_sufficient_fee_increase() -> None:
    """A conflicting bundle paying MEMPOOL_MIN_FEE_INCREASE more replaces the original."""
    mempool_manager, coins = await setup_mempool_with_coins(coin_amounts=list(range(1000000000, 1000000010)))
    original = await make_and_send_spendbundle(mempool_manager, coins[0])
    replacement = await make_and_send_spendbundle(mempool_manager, coins[0], fee=MEMPOOL_MIN_FEE_INCREASE)
    # The fee bump meets the minimum, so the replacement evicts the original.
    assert_sb_not_in_pool(mempool_manager, original)
    assert_sb_in_pool(mempool_manager, replacement)
@pytest.mark.asyncio
async def test_superset() -> None:
    """An aggregate spending a superset of an existing item's coins may replace it."""
    mempool_manager, coins = await setup_mempool_with_coins(coin_amounts=list(range(1000000000, 1000000010)))
    existing = await make_and_send_spendbundle(mempool_manager, coins[0])
    extra = make_test_spendbundle(coins[1], fee=MEMPOOL_MIN_FEE_INCREASE)
    # The aggregate spends coins[0] and coins[1] — a superset of `existing`.
    aggregate = SpendBundle.aggregate([extra, existing])
    await send_spendbundle(mempool_manager, aggregate)
    assert_sb_in_pool(mempool_manager, aggregate)
    assert_sb_not_in_pool(mempool_manager, existing)
@pytest.mark.asyncio
async def test_superset_violation() -> None:
    """A conflicting aggregate that does not spend all of an existing item's coins is rejected."""
    mempool_manager, coins = await setup_mempool_with_coins(coin_amounts=list(range(1000000000, 1000000010)))
    sb_a = make_test_spendbundle(coins[0])
    sb_b = make_test_spendbundle(coins[1])
    sb_ab = SpendBundle.aggregate([sb_a, sb_b])
    await send_spendbundle(mempool_manager, sb_ab)
    assert_sb_in_pool(mempool_manager, sb_ab)
    # sb_bc conflicts with sb_ab (both spend coins[1]) but does not spend
    # coins[0], so it is not a superset and must not replace sb_ab.
    sb_c = make_test_spendbundle(coins[2], fee=MEMPOOL_MIN_FEE_INCREASE)
    sb_bc = SpendBundle.aggregate([sb_b, sb_c])
    await send_spendbundle(
        mempool_manager, sb_bc, expected_result=(MempoolInclusionStatus.PENDING, Err.MEMPOOL_CONFLICT)
    )
    assert_sb_in_pool(mempool_manager, sb_ab)
    assert_sb_not_in_pool(mempool_manager, sb_bc)
@pytest.mark.asyncio
async def test_total_fpc_decrease() -> None:
    """A replacement that lowers the total fee-per-cost of the evicted items is rejected."""
    mempool_manager, coins = await setup_mempool_with_coins(coin_amounts=list(range(1000000000, 1000000010)))
    no_fee = make_test_spendbundle(coins[0])
    double_fee = make_test_spendbundle(coins[1], fee=MEMPOOL_MIN_FEE_INCREASE * 2)
    combined = SpendBundle.aggregate([no_fee, double_fee])
    await send_spendbundle(mempool_manager, combined)
    third = await make_and_send_spendbundle(mempool_manager, coins[2], fee=MEMPOOL_MIN_FEE_INCREASE * 2)
    assert_sb_in_pool(mempool_manager, combined)
    assert_sb_in_pool(mempool_manager, third)
    # Folding in a low-fee fourth spend drags the aggregate's fee per cost
    # below that of the items it would replace, so it must be rejected.
    fourth = make_test_spendbundle(coins[3], fee=MEMPOOL_MIN_FEE_INCREASE)
    aggregate = SpendBundle.aggregate([combined, third, fourth])
    await send_spendbundle(
        mempool_manager, aggregate, expected_result=(MempoolInclusionStatus.PENDING, Err.MEMPOOL_CONFLICT)
    )
    assert_sb_not_in_pool(mempool_manager, aggregate)
@pytest.mark.asyncio
async def test_sufficient_total_fpc_increase() -> None:
    """A replacement with a higher fee-per-cost than all of its conflicts is accepted."""
    mempool_manager, coins = await setup_mempool_with_coins(coin_amounts=list(range(1000000000, 1000000010)))
    no_fee = make_test_spendbundle(coins[0])
    double_fee = make_test_spendbundle(coins[1], fee=MEMPOOL_MIN_FEE_INCREASE * 2)
    combined = SpendBundle.aggregate([no_fee, double_fee])
    await send_spendbundle(mempool_manager, combined)
    third = await make_and_send_spendbundle(mempool_manager, coins[2], fee=MEMPOOL_MIN_FEE_INCREASE * 2)
    assert_sb_in_pool(mempool_manager, combined)
    assert_sb_in_pool(mempool_manager, third)
    # The well-funded fourth spend lifts the aggregate's fee per cost above
    # its conflicts, so it should evict them and enter the mempool.
    fourth = make_test_spendbundle(coins[3], fee=MEMPOOL_MIN_FEE_INCREASE * 3)
    aggregate = SpendBundle.aggregate([combined, third, fourth])
    await send_spendbundle(mempool_manager, aggregate)
    assert_sb_in_pool(mempool_manager, aggregate)
    assert_sb_not_in_pool(mempool_manager, combined)
    assert_sb_not_in_pool(mempool_manager, third)
@pytest.mark.asyncio
async def test_spends_by_feerate() -> None:
    """Mempool items must be iterated in descending fee-per-cost order."""

    async def send_to_mempool_returning_item(
        mempool_manager: MempoolManager, coin: Coin, *, fee: int = 0
    ) -> MempoolItem:
        # Submit a minimal CREATE_COIN spend and return its mempool item.
        sb = spend_bundle_from_conditions(
            [[ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, coin.amount - fee]], coin
        )
        outcome = await add_spendbundle(mempool_manager, sb, sb.name())
        assert outcome[1] == MempoolInclusionStatus.SUCCESS
        item = mempool_manager.get_mempool_item(sb.name())
        assert item is not None
        return item

    mempool_manager, coins = await setup_mempool_with_coins(coin_amounts=list(range(1000000000, 1000000005)))
    high = await send_to_mempool_returning_item(mempool_manager, coins[0], fee=11000000)  # ~3.73 FPC
    low = await send_to_mempool_returning_item(mempool_manager, coins[1], fee=10000000)  # ~3.39 FPC
    mid = await send_to_mempool_returning_item(mempool_manager, coins[2], fee=10500000)  # ~3.56 FPC
    # Sanity-check the intended ordering of the three fee rates.
    assert high.fee_per_cost > mid.fee_per_cost
    assert mid.fee_per_cost > low.fee_per_cost
    ordered = mempool_manager.mempool.spends_by_feerate()
    assert list(ordered) == [high, mid, low]

View File

@ -77,11 +77,7 @@ class TestCostCalculation:
program: BlockGenerator = simple_solution_generator(spend_bundle)
npc_result: NPCResult = get_name_puzzle_conditions(
program,
test_constants.MAX_BLOCK_COST_CLVM,
cost_per_byte=test_constants.COST_PER_BYTE,
mempool_mode=False,
height=softfork_height,
program, test_constants.MAX_BLOCK_COST_CLVM, mempool_mode=False, height=softfork_height
)
assert npc_result.error is None
@ -147,19 +143,11 @@ class TestCostCalculation:
)
generator = BlockGenerator(program, [], [])
npc_result: NPCResult = get_name_puzzle_conditions(
generator,
test_constants.MAX_BLOCK_COST_CLVM,
cost_per_byte=test_constants.COST_PER_BYTE,
mempool_mode=True,
height=softfork_height,
generator, test_constants.MAX_BLOCK_COST_CLVM, mempool_mode=True, height=softfork_height
)
assert npc_result.error is not None
npc_result = get_name_puzzle_conditions(
generator,
test_constants.MAX_BLOCK_COST_CLVM,
cost_per_byte=test_constants.COST_PER_BYTE,
mempool_mode=False,
height=softfork_height,
generator, test_constants.MAX_BLOCK_COST_CLVM, mempool_mode=False, height=softfork_height
)
assert npc_result.error is None
@ -182,18 +170,11 @@ class TestCostCalculation:
program = SerializedProgram.from_bytes(binutils.assemble(f"(i (0xfe (q . 0)) (q . ()) {disassembly})").as_bin())
generator = BlockGenerator(program, [], [])
npc_result: NPCResult = get_name_puzzle_conditions(
generator,
test_constants.MAX_BLOCK_COST_CLVM,
cost_per_byte=test_constants.COST_PER_BYTE,
mempool_mode=True,
generator, test_constants.MAX_BLOCK_COST_CLVM, mempool_mode=True, height=softfork_height
)
assert npc_result.error is not None
npc_result = get_name_puzzle_conditions(
generator,
test_constants.MAX_BLOCK_COST_CLVM,
cost_per_byte=test_constants.COST_PER_BYTE,
mempool_mode=False,
height=softfork_height,
generator, test_constants.MAX_BLOCK_COST_CLVM, mempool_mode=False, height=softfork_height
)
assert npc_result.error is None
@ -207,11 +188,7 @@ class TestCostCalculation:
with assert_runtime(seconds=0.5, label=request.node.name):
generator = BlockGenerator(program, [], [])
npc_result = get_name_puzzle_conditions(
generator,
test_constants.MAX_BLOCK_COST_CLVM,
cost_per_byte=test_constants.COST_PER_BYTE,
mempool_mode=False,
height=softfork_height,
generator, test_constants.MAX_BLOCK_COST_CLVM, mempool_mode=False, height=softfork_height
)
assert npc_result.error is None
@ -232,22 +209,14 @@ class TestCostCalculation:
# ensure we fail if the program exceeds the cost
generator = BlockGenerator(program, [], [])
npc_result: NPCResult = get_name_puzzle_conditions(
generator,
10000000,
cost_per_byte=0,
mempool_mode=False,
height=softfork_height,
)
npc_result = get_name_puzzle_conditions(generator, 10000000, mempool_mode=False, height=softfork_height)
assert npc_result.error is not None
assert npc_result.cost == 0
# raise the max cost to make sure this passes
# ensure we pass if the program does not exceeds the cost
npc_result = get_name_puzzle_conditions(
generator, 23000000, cost_per_byte=0, mempool_mode=False, height=softfork_height
)
npc_result = get_name_puzzle_conditions(generator, 23000000, mempool_mode=False, height=softfork_height)
assert npc_result.error is None
assert npc_result.cost > 10000000

View File

@ -77,7 +77,7 @@ class TestDbUpgrade:
for block in blocks:
# await _validate_and_add_block(bc, block)
results = PreValidationResult(None, uint64(1), None, False)
result, err, _ = await bc.receive_block(block, results)
result, err, _ = await bc.add_block(block, results)
assert err is None
finally:
await db_wrapper1.close()

View File

@ -143,7 +143,7 @@ async def make_db(db_file: Path, blocks: List[FullBlock]) -> None:
for block in blocks:
results = PreValidationResult(None, uint64(1), None, False)
result, err, _ = await bc.receive_block(block, results)
result, err, _ = await bc.add_block(block, results)
assert err is None
finally:
await db_wrapper.close()

View File

@ -127,9 +127,7 @@ class TestROM:
cost, r = run_generator_unsafe(gen, max_cost=MAX_COST)
print(r)
npc_result = get_name_puzzle_conditions(
gen, max_cost=MAX_COST, cost_per_byte=COST_PER_BYTE, mempool_mode=False, height=softfork_height
)
npc_result = get_name_puzzle_conditions(gen, max_cost=MAX_COST, mempool_mode=False, height=softfork_height)
assert npc_result.error is None
assert npc_result.cost == EXPECTED_COST + ConditionCost.CREATE_COIN.value + (
len(bytes(gen.program)) * COST_PER_BYTE

View File

@ -157,12 +157,7 @@ class TestPoolPuzzles(TestCase):
sig,
)
# Spend it!
coin_db.update_coin_store_for_spend_bundle(
spend_bundle,
time,
DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
DEFAULT_CONSTANTS.COST_PER_BYTE,
)
coin_db.update_coin_store_for_spend_bundle(spend_bundle, time, DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM)
# Test that we can retrieve the extra data
assert get_delayed_puz_info_from_launcher_spend(launcher_coinsol) == (DELAY_TIME, DELAY_PH)
assert solution_to_pool_state(launcher_coinsol) == pool_state
@ -181,10 +176,7 @@ class TestPoolPuzzles(TestCase):
)
# Spend it!
fork_coin_db.update_coin_store_for_spend_bundle(
SpendBundle([post_launch_coinsol], G2Element()),
time,
DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
DEFAULT_CONSTANTS.COST_PER_BYTE,
SpendBundle([post_launch_coinsol], G2Element()), time, DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM
)
# HONEST ABSORB
@ -217,10 +209,7 @@ class TestPoolPuzzles(TestCase):
)
# Spend it!
coin_db.update_coin_store_for_spend_bundle(
SpendBundle(coin_sols, G2Element()),
time,
DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
DEFAULT_CONSTANTS.COST_PER_BYTE,
SpendBundle(coin_sols, G2Element()), time, DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM
)
# ABSORB A NON EXISTENT REWARD (Negative test)
@ -251,10 +240,7 @@ class TestPoolPuzzles(TestCase):
BadSpendBundleError, match="condition validation failure Err.ASSERT_ANNOUNCE_CONSUMED_FAILED"
):
coin_db.update_coin_store_for_spend_bundle(
SpendBundle([singleton_coinsol], G2Element()),
time,
DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
DEFAULT_CONSTANTS.COST_PER_BYTE,
SpendBundle([singleton_coinsol], G2Element()), time, DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM
)
# SPEND A NON-REWARD P2_SINGLETON (Negative test)
@ -281,10 +267,7 @@ class TestPoolPuzzles(TestCase):
BadSpendBundleError, match="condition validation failure Err.ASSERT_ANNOUNCE_CONSUMED_FAILED"
):
coin_db.update_coin_store_for_spend_bundle(
SpendBundle([singleton_coinsol, bad_coinsol], G2Element()),
time,
DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
DEFAULT_CONSTANTS.COST_PER_BYTE,
SpendBundle([singleton_coinsol, bad_coinsol], G2Element()), time, DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM
)
# ENTER WAITING ROOM
@ -310,10 +293,7 @@ class TestPoolPuzzles(TestCase):
)
# Spend it!
coin_db.update_coin_store_for_spend_bundle(
SpendBundle([travel_coinsol], sig),
time,
DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
DEFAULT_CONSTANTS.COST_PER_BYTE,
SpendBundle([travel_coinsol], sig), time, DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM
)
# ESCAPE TOO FAST (Negative test)
@ -337,10 +317,7 @@ class TestPoolPuzzles(TestCase):
# Spend it and hope it fails!
with pytest.raises(BadSpendBundleError, match="condition validation failure Err.ASSERT_HEIGHT_RELATIVE_FAILED"):
coin_db.update_coin_store_for_spend_bundle(
SpendBundle([return_coinsol], sig),
time,
DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
DEFAULT_CONSTANTS.COST_PER_BYTE,
SpendBundle([return_coinsol], sig), time, DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM
)
# ABSORB WHILE IN WAITING ROOM
@ -359,10 +336,7 @@ class TestPoolPuzzles(TestCase):
)
# Spend it!
coin_db.update_coin_store_for_spend_bundle(
SpendBundle(coin_sols, G2Element()),
time,
DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
DEFAULT_CONSTANTS.COST_PER_BYTE,
SpendBundle(coin_sols, G2Element()), time, DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM
)
# LEAVE THE WAITING ROOM
@ -395,10 +369,7 @@ class TestPoolPuzzles(TestCase):
)
# Spend it!
coin_db.update_coin_store_for_spend_bundle(
SpendBundle([return_coinsol], sig),
time,
DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
DEFAULT_CONSTANTS.COST_PER_BYTE,
SpendBundle([return_coinsol], sig), time, DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM
)
# ABSORB ONCE MORE FOR GOOD MEASURE
@ -416,8 +387,5 @@ class TestPoolPuzzles(TestCase):
)
# Spend it!
coin_db.update_coin_store_for_spend_bundle(
SpendBundle(coin_sols, G2Element()),
time,
DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
DEFAULT_CONSTANTS.COST_PER_BYTE,
SpendBundle(coin_sols, G2Element()), time, DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM
)

View File

@ -12,7 +12,7 @@ from chia.util.ints import uint32
def run_and_get_removals_and_additions(
block: FullBlock, max_cost: int, *, cost_per_byte: int, height: uint32, mempool_mode=False
block: FullBlock, max_cost: int, *, height: uint32, mempool_mode=False
) -> Tuple[List[bytes32], List[Coin]]:
removals: List[bytes32] = []
additions: List[Coin] = []
@ -23,11 +23,7 @@ def run_and_get_removals_and_additions(
if block.transactions_generator is not None:
npc_result = get_name_puzzle_conditions(
BlockGenerator(block.transactions_generator, [], []),
max_cost,
cost_per_byte=cost_per_byte,
mempool_mode=mempool_mode,
height=height,
BlockGenerator(block.transactions_generator, [], []), max_cost, mempool_mode=mempool_mode, height=height
)
assert npc_result.error is None
rem, add = tx_removals_and_additions(npc_result.conds)

View File

@ -62,7 +62,7 @@ class TestCATWallet:
)
# The next 2 lines are basically a noop, it just adds test coverage
cat_wallet = await CATWallet.create(wallet_node.wallet_state_manager, wallet, cat_wallet.wallet_info)
await wallet_node.wallet_state_manager.add_new_wallet(cat_wallet, cat_wallet.id())
await wallet_node.wallet_state_manager.add_new_wallet(cat_wallet)
tx_queue: List[TransactionRecord] = await wallet_node.wallet_state_manager.tx_store.get_not_sent()
tx_record = tx_queue[0]

View File

@ -42,7 +42,9 @@ class TestCATTrades:
"reuse_puzhash",
[True, False],
)
async def test_cat_trades(self, wallets_prefarm, forwards_compat: bool, reuse_puzhash: bool):
async def test_cat_trades(
self, wallets_prefarm, forwards_compat: bool, reuse_puzhash: bool, softfork_height: uint32
):
(
[wallet_node_maker, initial_maker_balance],
[wallet_node_taker, initial_taker_balance],
@ -575,7 +577,9 @@ class TestCATTrades:
# (and therefore are solved as a complete ring)
bundle = Offer.aggregate([first_offer, second_offer, third_offer, fourth_offer, fifth_offer]).to_valid_spend()
program = simple_solution_generator(bundle)
result: NPCResult = get_name_puzzle_conditions(program, INFINITE_COST, cost_per_byte=0, mempool_mode=True)
result: NPCResult = get_name_puzzle_conditions(
program, INFINITE_COST, mempool_mode=True, height=softfork_height
)
assert result.error is None
@pytest.mark.asyncio

View File

@ -1,4 +1,4 @@
from __future__ import annotations
job_timeout = 45
job_timeout = 70
checkout_blocks_and_plots = True

View File

@ -47,7 +47,7 @@ async def farm_blocks_until(
async def get_nft_count(wallet: NFTWallet) -> int:
return len(await wallet.get_current_nfts())
return await wallet.get_nft_count()
@pytest.mark.parametrize(
@ -162,8 +162,7 @@ async def test_nft_offer_sell_nft(
coins_maker = await nft_wallet_maker.get_current_nfts()
assert len(coins_maker) == 1
coins_taker = await nft_wallet_taker.get_current_nfts()
assert len(coins_taker) == 0
assert await nft_wallet_taker.get_nft_count() == 0
nft_to_offer = coins_maker[0]
nft_to_offer_info: Optional[PuzzleInfo] = match_puzzle(uncurry_puzzle(nft_to_offer.full_puzzle))
@ -209,9 +208,7 @@ async def test_nft_offer_sell_nft(
assert tx_records is not None
async def maker_0_taker_1() -> bool:
return (
len(await nft_wallet_maker.get_current_nfts()) == 0 and len(await nft_wallet_taker.get_current_nfts()) == 1
)
return await nft_wallet_maker.get_nft_count() == 0 and await nft_wallet_taker.get_nft_count() == 1
await farm_blocks_until(maker_0_taker_1, full_node_api, ph_token)
await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node_maker, wallet_node_taker], timeout=20)
@ -337,11 +334,10 @@ async def test_nft_offer_request_nft(
trade_manager_maker = wallet_maker.wallet_state_manager.trade_manager
trade_manager_taker = wallet_taker.wallet_state_manager.trade_manager
coins_maker = await nft_wallet_maker.get_current_nfts()
assert len(coins_maker) == 0
coins_taker = await nft_wallet_taker.get_current_nfts()
assert len(coins_taker) == 1
assert await nft_wallet_maker.get_nft_count() == 0
nft_to_request = coins_taker[0]
nft_to_request_info: Optional[PuzzleInfo] = match_puzzle(uncurry_puzzle(nft_to_request.full_puzzle))
@ -388,9 +384,7 @@ async def test_nft_offer_request_nft(
await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node_maker, wallet_node_taker], timeout=20)
async def maker_1_taker_0() -> bool:
return (
len(await nft_wallet_maker.get_current_nfts()) == 1 and len(await nft_wallet_taker.get_current_nfts()) == 0
)
return await nft_wallet_maker.get_nft_count() == 1 and await nft_wallet_taker.get_nft_count() == 0
await farm_blocks_until(maker_1_taker_0, full_node_api, ph_token)
@ -532,12 +526,9 @@ async def test_nft_offer_sell_did_to_did(
# maker create offer: NFT for xch
trade_manager_maker = wallet_maker.wallet_state_manager.trade_manager
trade_manager_taker = wallet_taker.wallet_state_manager.trade_manager
coins_maker = await nft_wallet_maker.get_current_nfts()
assert len(coins_maker) == 1
coins_taker = await nft_wallet_taker.get_current_nfts()
assert len(coins_taker) == 0
assert await nft_wallet_taker.get_nft_count() == 0
nft_to_offer = coins_maker[0]
nft_to_offer_info: Optional[PuzzleInfo] = match_puzzle(uncurry_puzzle(nft_to_offer.full_puzzle))
nft_to_offer_asset_id: bytes32 = create_asset_id(nft_to_offer_info) # type: ignore
@ -580,9 +571,9 @@ async def test_nft_offer_sell_did_to_did(
async def maker_0_taker_1() -> bool:
return (
len(await nft_wallet_maker.get_current_nfts()) == 0
await nft_wallet_maker.get_nft_count() == 0
and len(wallet_taker.wallet_state_manager.wallets) == 4
and len(await wallet_taker.wallet_state_manager.wallets[4].get_current_nfts()) == 1
and await wallet_taker.wallet_state_manager.wallets[4].get_nft_count() == 1
)
await farm_blocks_until(maker_0_taker_1, full_node_api, ph_token)
@ -591,7 +582,7 @@ async def test_nft_offer_sell_did_to_did(
# assert nnew nft wallet is created for taker
await time_out_assert(20, len, 4, wallet_taker.wallet_state_manager.wallets)
await time_out_assert(20, get_nft_count, 1, wallet_taker.wallet_state_manager.wallets[4])
assert (await wallet_taker.wallet_state_manager.wallets[4].get_current_nfts())[0].nft_id == nft_to_offer_asset_id
assert await wallet_taker.wallet_state_manager.wallets[4].nft_store.get_nft_by_id(nft_to_offer_asset_id) is not None
# assert payments and royalties
expected_royalty = uint64(xch_requested * royalty_basis_pts / 10000)
expected_maker_balance = funds - 2 - maker_fee + xch_requested + expected_royalty
@ -1276,7 +1267,6 @@ async def test_nft_offer_sell_cancel_in_batch(self_hostname: str, two_wallet_nod
(True, (200, 500, 500)),
(False, (200, 500, 500)),
(False, (0, 0, 0)), # test that we can have 0 royalty
(False, (65000, 65534, 65535)), # test that we can reach max royalty
(False, (10000, 10001, 10005)), # tests 100% royalty is not allowed
(False, (100000, 10001, 10005)), # 1000% shouldn't work
],
@ -1302,7 +1292,6 @@ async def test_complex_nft_offer(
ph_maker = await wallet_maker.get_new_puzzlehash()
ph_taker = await wallet_taker.get_new_puzzlehash()
ph_token = bytes32(token_bytes())
if trusted:
wallet_node_maker.config["trusted_peers"] = {
full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
@ -1323,10 +1312,7 @@ async def test_complex_nft_offer(
for i in range(0, 2):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_maker))
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_taker))
if royalty_pts[0] > 60000:
blocks_needed = 9
else:
blocks_needed = 3
blocks_needed = 3
if not forwards_compat:
for i in range(blocks_needed):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_taker))
@ -1352,7 +1338,6 @@ async def test_complex_nft_offer(
await time_out_assert(30, wallet_maker.get_confirmed_balance, funds_maker)
await time_out_assert(30, wallet_taker.get_unconfirmed_balance, funds_taker)
await time_out_assert(30, wallet_taker.get_confirmed_balance, funds_taker)
CAT_AMOUNT = uint64(100000000)
async with wsm_maker.lock:
cat_wallet_maker: CATWallet = await CATWallet.create_new_cat_wallet(
@ -1408,7 +1393,6 @@ async def test_complex_nft_offer(
await time_out_assert(30, cat_wallet_maker.get_unconfirmed_balance, CAT_AMOUNT)
await time_out_assert(30, cat_wallet_taker.get_confirmed_balance, CAT_AMOUNT)
await time_out_assert(30, cat_wallet_taker.get_unconfirmed_balance, CAT_AMOUNT)
did_id_maker = bytes32.fromhex(did_wallet_maker.get_my_DID())
did_id_taker = bytes32.fromhex(did_wallet_taker.get_my_DID())
target_puzhash_maker = ph_maker
@ -1500,9 +1484,14 @@ async def test_complex_nft_offer(
nft_to_offer_asset_id_maker: bytes32 = maker_nfts[0].nft_id
nft_to_offer_asset_id_taker_1: bytes32 = taker_nfts[0].nft_id
nft_to_offer_asset_id_taker_2: bytes32 = taker_nfts[1].nft_id
XCH_REQUESTED = 2000000000000
CAT_REQUESTED = 100000
FEE = uint64(2000000000000)
if royalty_basis_pts_maker > 60000:
XCH_REQUESTED = 20000
CAT_REQUESTED = 1000
FEE = uint64(20000)
else:
XCH_REQUESTED = 2000000000000
CAT_REQUESTED = 100000
FEE = uint64(2000000000000)
complex_nft_offer = {
nft_to_offer_asset_id_maker: -1,
@ -1621,8 +1610,8 @@ async def test_complex_nft_offer(
)
maker_nfts = await basic_nft_wallet_maker.get_current_nfts()
taker_nfts = await basic_nft_wallet_taker.get_current_nfts()
await time_out_assert(30, len, 2, maker_nfts)
await time_out_assert(30, len, 1, taker_nfts)
assert len(maker_nfts) == 2
assert len(taker_nfts) == 1
assert nft_to_offer_asset_id_maker == taker_nfts[0].nft_id
assert nft_to_offer_asset_id_taker_1 in [nft.nft_id for nft in maker_nfts]
@ -1701,9 +1690,6 @@ async def test_complex_nft_offer(
cat_wallet_maker.get_asset_id(),
wsm_taker,
)
maker_nfts = await basic_nft_wallet_maker.get_current_nfts()
taker_nfts = await basic_nft_wallet_taker.get_current_nfts()
await time_out_assert(30, len, 3, maker_nfts)
await time_out_assert(30, len, 0, taker_nfts)
assert nft_to_offer_asset_id_maker in [nft.nft_id for nft in maker_nfts]
assert await basic_nft_wallet_maker.get_nft_count() == 3
assert await basic_nft_wallet_taker.get_nft_count() == 0
assert await basic_nft_wallet_maker.nft_store.get_nft_by_id(nft_to_offer_asset_id_maker) is not None

View File

@ -1,6 +1,5 @@
from __future__ import annotations
import asyncio
from secrets import token_bytes
from typing import Any, Dict
@ -27,8 +26,7 @@ from chia.wallet.util.address_type import AddressType
async def nft_count(wallet: NFTWallet) -> int:
nfts = await wallet.nft_store.get_nft_list()
return len(nfts)
return await wallet.get_nft_count()
@pytest.mark.parametrize(
@ -297,14 +295,12 @@ async def test_nft_mint_from_did_rpc(
for sb in spends:
resp = await client_node.push_tx(sb)
assert resp["success"]
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
await asyncio.sleep(2)
await full_node_api.process_spend_bundles([sb])
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
async def get_taker_nfts() -> int:
nfts = (await api_taker.nft_get_nfts({"wallet_id": nft_wallet_taker["wallet_id"]}))["nft_list"]
return len(nfts)
return int((await api_taker.nft_count_nfts({"wallet_id": nft_wallet_taker["wallet_id"]}))["count"])
# We are using a long time out here because it can take a long time for the NFTs to show up
# Even with only 10 NFTs it regularly takes longer than 30-40s for them to be found
@ -481,14 +477,12 @@ async def test_nft_mint_from_did_rpc_no_royalties(
for sb in spends:
resp = await client_node.push_tx(sb)
assert resp["success"]
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
await asyncio.sleep(2)
await full_node_api.process_spend_bundles([sb])
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
async def get_taker_nfts() -> int:
nfts = (await api_taker.nft_get_nfts({"wallet_id": nft_wallet_taker["wallet_id"]}))["nft_list"]
return len(nfts)
return int((await api_taker.nft_count_nfts({"wallet_id": nft_wallet_taker["wallet_id"]}))["count"])
await time_out_assert(60, get_taker_nfts, n)
@ -865,14 +859,12 @@ async def test_nft_mint_from_xch_rpc(
for sb in spends:
resp = await client_node.push_tx(sb)
assert resp["success"]
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
await asyncio.sleep(2)
await full_node_api.process_spend_bundles([sb])
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
async def get_taker_nfts() -> int:
nfts = (await api_taker.nft_get_nfts({"wallet_id": nft_wallet_taker["wallet_id"]}))["nft_list"]
return len(nfts)
return int((await api_taker.nft_count_nfts({"wallet_id": nft_wallet_taker["wallet_id"]}))["count"])
# We are using a long time out here because it can take a long time for the NFTs to show up
# Even with only 10 NFTs it regularly takes longer than 30-40s for them to be found

File diff suppressed because one or more lines are too long

View File

@ -62,7 +62,7 @@ async def wait_rpc_state_condition(
f"timed out while waiting for {async_function.__name__}(): {elapsed} >= {timeout}",
)
await asyncio.sleep(0.5)
await asyncio.sleep(0.3)
async def make_new_block_with(resp: Dict, full_node_api: FullNodeSimulator, ph: bytes32) -> SpendBundle:
@ -164,10 +164,8 @@ async def test_nft_wallet_creation_automatically(self_hostname: str, two_wallet_
await time_out_assert(30, get_nft_count, 0, nft_wallet_0)
await time_out_assert(30, get_nft_count, 1, nft_wallet_1)
coins = await nft_wallet_0.get_current_nfts()
assert len(coins) == 0
coins = await nft_wallet_1.get_current_nfts()
assert len(coins) == 1
assert await nft_wallet_0.get_nft_count() == 0
assert await nft_wallet_1.get_nft_count() == 1
@pytest.mark.parametrize(
@ -1326,8 +1324,7 @@ async def test_nft_bulk_set_did(self_hostname: str, two_wallet_nodes: Any, trust
[dict(wallet_id=nft_wallet_1_id)],
lambda x: len(x["nft_list"]) > 1 and x["nft_list"][0].owner_did,
)
assert len(await wallet_node_0.wallet_state_manager.wallets[nft_wallet_0_id].get_current_nfts()) == 2
assert await wallet_node_0.wallet_state_manager.wallets[nft_wallet_0_id].get_nft_count() == 2
coins = resp["nft_list"]
assert len(coins) == 2
assert coins[0].owner_did.hex() == hex_did_id

View File

@ -34,7 +34,6 @@ ANYONE_CAN_SPEND_WITH_PADDING_PUZZLE_HASH = Program.to(binutils.assemble("(a (q
POOL_REWARD_PREFIX_MAINNET = bytes32.fromhex("ccd5bb71183532bff220ba46c268991a00000000000000000000000000000000")
MAX_BLOCK_COST_CLVM = int(1e18)
COST_PER_BYTE = int(12000)
class PuzzleDB:
@ -418,9 +417,7 @@ def spend_coin_to_singleton(
coin_spend = CoinSpend(farmed_coin, ANYONE_CAN_SPEND_PUZZLE, conditions)
spend_bundle = SpendBundle.aggregate([launcher_spend_bundle, SpendBundle([coin_spend], G2Element())])
additions, removals = coin_store.update_coin_store_for_spend_bundle(
spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
)
additions, removals = coin_store.update_coin_store_for_spend_bundle(spend_bundle, now, MAX_BLOCK_COST_CLVM)
launcher_coin = launcher_spend_bundle.coin_spends[0].coin
@ -486,7 +483,7 @@ def test_lifecycle_with_coinstore_as_wallet():
#######
# spend coin to a singleton
additions, removals = spend_coin_to_singleton(PUZZLE_DB, LAUNCHER_PUZZLE, coin_store, now)
_, removals = spend_coin_to_singleton(PUZZLE_DB, LAUNCHER_PUZZLE, coin_store, now)
assert len(list(coin_store.all_unspent_coins())) == 1
@ -521,9 +518,7 @@ def test_lifecycle_with_coinstore_as_wallet():
coin_spend = SINGLETON_WALLET.coin_spend_for_conditions(PUZZLE_DB, conditions=singleton_conditions)
spend_bundle = SpendBundle([coin_spend, p2_singleton_coin_spend], G2Element())
additions, removals = coin_store.update_coin_store_for_spend_bundle(
spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
)
_, removals = coin_store.update_coin_store_for_spend_bundle(spend_bundle, now, MAX_BLOCK_COST_CLVM)
now.seconds += 500
now.height += 1
@ -552,9 +547,7 @@ def test_lifecycle_with_coinstore_as_wallet():
coin_spend = SINGLETON_WALLET.coin_spend_for_conditions(PUZZLE_DB, conditions=singleton_conditions)
spend_bundle = SpendBundle([coin_spend, p2_singleton_coin_spend], G2Element())
additions, removals = coin_store.update_coin_store_for_spend_bundle(
spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
)
_, removals = coin_store.update_coin_store_for_spend_bundle(spend_bundle, now, MAX_BLOCK_COST_CLVM)
now.seconds += 500
now.height += 1
@ -605,9 +598,7 @@ def test_lifecycle_with_coinstore_as_wallet():
spend_bundle = SpendBundle([singleton_coin_spend], G2Element())
additions, removals = coin_store.update_coin_store_for_spend_bundle(
spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
)
_, removals = coin_store.update_coin_store_for_spend_bundle(spend_bundle, now, MAX_BLOCK_COST_CLVM)
assert len(list(coin_store.all_unspent_coins())) == 1
@ -643,9 +634,7 @@ def test_lifecycle_with_coinstore_as_wallet():
spend_bundle = SpendBundle([coin_spend, p2_singleton_coin_spend], G2Element())
spend_bundle.debug()
additions, removals = coin_store.update_coin_store_for_spend_bundle(
spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
)
_, removals = coin_store.update_coin_store_for_spend_bundle(spend_bundle, now, MAX_BLOCK_COST_CLVM)
now.seconds += 500
now.height += 1
@ -661,9 +650,7 @@ def test_lifecycle_with_coinstore_as_wallet():
)
spend_bundle = SpendBundle([coin_spend], G2Element())
additions, removals = coin_store.update_coin_store_for_spend_bundle(
spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
)
_, removals = coin_store.update_coin_store_for_spend_bundle(spend_bundle, now, MAX_BLOCK_COST_CLVM)
now.seconds += 500
now.height += 1
change_count = SINGLETON_WALLET.update_state(PUZZLE_DB, removals)
@ -700,9 +687,7 @@ def test_lifecycle_with_coinstore_as_wallet():
)
spend_bundle = SpendBundle([coin_spend, p2_singleton_coin_spend], G2Element())
additions, removals = coin_store.update_coin_store_for_spend_bundle(
spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
)
_, removals = coin_store.update_coin_store_for_spend_bundle(spend_bundle, now, MAX_BLOCK_COST_CLVM)
now.seconds += 500
now.height += 1
@ -730,9 +715,7 @@ def test_lifecycle_with_coinstore_as_wallet():
PUZZLE_DB.add_puzzle(full_puzzle)
try:
additions, removals = coin_store.update_coin_store_for_spend_bundle(
spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
)
_, removals = coin_store.update_coin_store_for_spend_bundle(spend_bundle, now, MAX_BLOCK_COST_CLVM)
assert 0
except BadSpendBundleError as ex:
assert ex.args[0] == "condition validation failure Err.ASSERT_HEIGHT_RELATIVE_FAILED"
@ -740,9 +723,7 @@ def test_lifecycle_with_coinstore_as_wallet():
now.seconds += 350000
now.height += 1445
additions, removals = coin_store.update_coin_store_for_spend_bundle(
spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
)
_, removals = coin_store.update_coin_store_for_spend_bundle(spend_bundle, now, MAX_BLOCK_COST_CLVM)
SINGLETON_WALLET.update_state(PUZZLE_DB, removals)
@ -757,9 +738,7 @@ def test_lifecycle_with_coinstore_as_wallet():
spend_bundle = SpendBundle([coin_spend], G2Element())
spend_bundle.debug()
additions, removals = coin_store.update_coin_store_for_spend_bundle(
spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
)
_, removals = coin_store.update_coin_store_for_spend_bundle(spend_bundle, now, MAX_BLOCK_COST_CLVM)
update_count = SINGLETON_WALLET.update_state(PUZZLE_DB, removals)
assert update_count == 0

View File

@ -4,7 +4,7 @@ import dataclasses
import pytest
from chia.consensus.blockchain import ReceiveBlockResult
from chia.consensus.blockchain import AddBlockResult
from chia.protocols import full_node_protocol
from chia.simulator.block_tools import test_constants
from chia.types.blockchain_format.vdf import VDFProof
@ -75,26 +75,26 @@ class TestWalletBlockchain:
res, err = await chain.receive_block(header_blocks[50])
print(res, err)
assert res == ReceiveBlockResult.DISCONNECTED_BLOCK
assert res == AddBlockResult.DISCONNECTED_BLOCK
res, err = await chain.receive_block(header_blocks[400])
print(res, err)
assert res == ReceiveBlockResult.ALREADY_HAVE_BLOCK
assert res == AddBlockResult.ALREADY_HAVE_BLOCK
res, err = await chain.receive_block(header_blocks[507])
print(res, err)
assert res == ReceiveBlockResult.DISCONNECTED_BLOCK
assert res == AddBlockResult.DISCONNECTED_BLOCK
res, err = await chain.receive_block(
dataclasses.replace(header_blocks[506], challenge_chain_ip_proof=VDFProof(2, b"123", True))
)
assert res == ReceiveBlockResult.INVALID_BLOCK
assert res == AddBlockResult.INVALID_BLOCK
assert (await chain.get_peak_block()).height == 505
for block in header_blocks[506:]:
res, err = await chain.receive_block(block)
assert res == ReceiveBlockResult.NEW_PEAK
assert res == AddBlockResult.NEW_PEAK
assert (await chain.get_peak_block()).height == block.height
assert (await chain.get_peak_block()).height == 999

View File

@ -231,42 +231,6 @@ async def test_get_records_by_parent_id() -> None:
assert await store.get_coin_records_by_parent_id(coin_7.parent_coin_info) == [record_7]
@pytest.mark.asyncio
async def test_get_multiple_coin_records() -> None:
async with DBConnection(1) as db_wrapper:
store = await WalletCoinStore.create(db_wrapper)
await store.add_coin_record(record_1)
await store.add_coin_record(record_2)
await store.add_coin_record(record_3)
await store.add_coin_record(record_4)
await store.add_coin_record(record_5)
await store.add_coin_record(record_6)
await store.add_coin_record(record_7)
assert set(await store.get_multiple_coin_records([coin_1.name(), coin_2.name(), coin_3.name()])) == set(
[record_1, record_2, record_3]
)
assert set(await store.get_multiple_coin_records([coin_5.name(), coin_6.name(), coin_7.name()])) == set(
[record_5, record_6, record_7]
)
assert set(
await store.get_multiple_coin_records(
[
coin_1.name(),
coin_2.name(),
coin_3.name(),
coin_4.name(),
coin_5.name(),
coin_6.name(),
coin_7.name(),
]
)
) == set([record_1, record_2, record_3, record_4, record_5, record_6, record_7])
@pytest.mark.asyncio
async def test_delete_coin_record() -> None:
async with DBConnection(1) as db_wrapper:
@ -281,17 +245,19 @@ async def test_delete_coin_record() -> None:
await store.add_coin_record(record_7)
assert set(
await store.get_multiple_coin_records(
[
coin_1.name(),
coin_2.name(),
coin_3.name(),
coin_4.name(),
coin_5.name(),
coin_6.name(),
coin_7.name(),
]
)
(
await store.get_coin_records(
[
coin_1.name(),
coin_2.name(),
coin_3.name(),
coin_4.name(),
coin_5.name(),
coin_6.name(),
coin_7.name(),
]
)
).values()
) == set([record_1, record_2, record_3, record_4, record_5, record_6, record_7])
assert await store.get_coin_record(coin_1.name()) == record_1
@ -300,9 +266,11 @@ async def test_delete_coin_record() -> None:
assert await store.get_coin_record(coin_1.name()) is None
assert set(
await store.get_multiple_coin_records(
[coin_2.name(), coin_3.name(), coin_4.name(), coin_5.name(), coin_6.name(), coin_7.name()]
)
(
await store.get_coin_records(
[coin_2.name(), coin_3.name(), coin_4.name(), coin_5.name(), coin_6.name(), coin_7.name()]
)
).values()
) == set([record_2, record_3, record_4, record_5, record_6, record_7])
@ -353,15 +321,17 @@ async def test_rollback_to_block() -> None:
await store.add_coin_record(r5)
assert set(
await store.get_multiple_coin_records(
[
coin_1.name(),
coin_2.name(),
coin_3.name(),
coin_4.name(),
coin_5.name(),
]
)
(
await store.get_coin_records(
[
coin_1.name(),
coin_2.name(),
coin_3.name(),
coin_4.name(),
coin_5.name(),
]
)
).values()
) == set(
[
r1,