mirror of https://github.com/Chia-Network/chia-blockchain.git
synced 2024-11-11 01:28:17 +03:00

commit c969e63188
Merge branch 'main' into altendky-data_layer_exploration

.github/workflows/build-test-macos-wallet-simple_sync.yml (new file, 106 lines, vendored)
@@ -0,0 +1,106 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS wallet-simple_sync Tests

on:
  push:
    branches:
      - main
    tags:
      - '**'
  pull_request:
    branches:
      - '**'

jobs:
  build:
    name: MacOS wallet-simple_sync Tests
    runs-on: ${{ matrix.os }}
    timeout-minutes: 30
    strategy:
      fail-fast: false
      max-parallel: 4
      matrix:
        python-version: [3.8, 3.9]
        os: [macOS-latest]

    steps:
      - name: Cancel previous runs on the same branch
        if: ${{ github.ref != 'refs/heads/main' }}
        uses: styfle/cancel-workflow-action@0.9.0
        with:
          access_token: ${{ github.token }}

      - name: Checkout Code
        uses: actions/checkout@v2
        with:
          fetch-depth: 0

      - name: Setup Python environment
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}

      - name: Create keychain for CI use
        run: |
          security create-keychain -p foo chiachain
          security default-keychain -s chiachain
          security unlock-keychain -p foo chiachain
          security set-keychain-settings -t 7200 -u chiachain

      - name: Get pip cache dir
        id: pip-cache
        run: |
          echo "::set-output name=dir::$(pip cache dir)"

      - name: Cache pip
        uses: actions/cache@v2.1.6
        with:
          # Note that new runners may break this https://github.com/actions/cache/issues/292
          path: ${{ steps.pip-cache.outputs.dir }}
          key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
          restore-keys: |
            ${{ runner.os }}-pip-

      - name: Checkout test blocks and plots
        uses: actions/checkout@v2
        with:
          repository: 'Chia-Network/test-cache'
          path: '.chia'
          ref: '0.27.0'
          fetch-depth: 1

      - name: Link home directory
        run: |
          cd $HOME
          ln -s $GITHUB_WORKSPACE/.chia
          echo "$HOME/.chia"
          ls -al $HOME/.chia

      - name: Run install script
        env:
          INSTALL_PYTHON_VERSION: ${{ matrix.python-version }}
          BUILD_VDF_CLIENT: "N"
        run: |
          brew install boost
          sh install.sh

      - name: Install timelord
        run: |
          . ./activate
          sh install-timelord.sh
          ./vdf_bench square_asm 400000

      - name: Install developer requirements
        run: |
          . ./activate
          venv/bin/python -m pip install pytest pytest-asyncio pytest-xdist

      - name: Test wallet-simple_sync code with pytest
        run: |
          . ./activate
          ./venv/bin/py.test tests/wallet/simple_sync/test_simple_sync_protocol.py -s -v --durations 0

#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
.github/workflows/build-test-ubuntu-wallet-simple_sync.yml (new file, 113 lines, vendored)
@@ -0,0 +1,113 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: Ubuntu wallet-simple_sync Test

on:
  push:
    branches:
      - main
    tags:
      - '**'
  pull_request:
    branches:
      - '**'

jobs:
  build:
    name: Ubuntu wallet-simple_sync Test
    runs-on: ${{ matrix.os }}
    timeout-minutes: 30
    strategy:
      fail-fast: false
      max-parallel: 4
      matrix:
        python-version: [3.7, 3.8, 3.9]
        os: [ubuntu-latest]

    steps:
      - name: Cancel previous runs on the same branch
        if: ${{ github.ref != 'refs/heads/main' }}
        uses: styfle/cancel-workflow-action@0.9.0
        with:
          access_token: ${{ github.token }}

      - name: Checkout Code
        uses: actions/checkout@v2
        with:
          fetch-depth: 0

      - name: Setup Python environment
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}

      - name: Cache npm
        uses: actions/cache@v2.1.6
        with:
          path: ~/.npm
          key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
          restore-keys: |
            ${{ runner.os }}-node-

      - name: Get pip cache dir
        id: pip-cache
        run: |
          echo "::set-output name=dir::$(pip cache dir)"

      - name: Cache pip
        uses: actions/cache@v2.1.6
        with:
          path: ${{ steps.pip-cache.outputs.dir }}
          key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
          restore-keys: |
            ${{ runner.os }}-pip-

      - name: Checkout test blocks and plots
        uses: actions/checkout@v2
        with:
          repository: 'Chia-Network/test-cache'
          path: '.chia'
          ref: '0.27.0'
          fetch-depth: 1

      - name: Link home directory
        run: |
          cd $HOME
          ln -s $GITHUB_WORKSPACE/.chia
          echo "$HOME/.chia"
          ls -al $HOME/.chia

      - name: Install ubuntu dependencies
        run: |
          sudo apt-get install software-properties-common
          sudo add-apt-repository ppa:deadsnakes/ppa
          sudo apt-get update
          sudo apt-get install python${{ matrix.python-version }}-venv python${{ matrix.python-version }}-distutils git -y

      - name: Run install script
        env:
          INSTALL_PYTHON_VERSION: ${{ matrix.python-version }}
        run: |
          sh install.sh

      - name: Install timelord
        run: |
          . ./activate
          sh install-timelord.sh
          ./vdf_bench square_asm 400000

      - name: Install developer requirements
        run: |
          . ./activate
          venv/bin/python -m pip install pytest pytest-asyncio pytest-xdist pytest-monitor

      - name: Test wallet-simple_sync code with pytest
        run: |
          . ./activate
          ./venv/bin/py.test tests/wallet/simple_sync/test_simple_sync_protocol.py -s -v --durations 0


#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
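
Both workflow files end by invoking a single pytest file. As a rough local equivalent of that CI step (a sketch that assumes the repository's virtual environment is activated and the developer requirements listed above are installed), the same test module can be driven through pytest's Python API:

# Sketch: run the wallet-simple_sync test file the same way the CI step does.
# Assumes the chia-blockchain venv is active and pytest/pytest-asyncio are installed.
import sys

import pytest

if __name__ == "__main__":
    args = ["tests/wallet/simple_sync/test_simple_sync_protocol.py", "-s", "-v", "--durations", "0"]
    sys.exit(pytest.main(args))
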
@@ -6,6 +6,12 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project does not yet adhere to [Semantic Versioning](https://semver.org/spec/v2.0.0.html)
for setuptools_scm/PEP 440 reasons.

## 1.2.7 Chia blockchain 2021-09-16

### Fixed

- Thanks to @jack60612 for fixing a bug that displayed 25 words instead of 24 words in some instances in the GUI.

## 1.2.6 Chia blockchain 2021-09-09

Today we’re releasing version 1.2.6 to address a resource bug with nodes, and we want to stress the importance of updating to it at the earliest convenience. The fix prevents a node from consuming excessive memory when many Bluebox Timelords are active on the chain.
@@ -25,7 +31,7 @@ Today we’re releasing version 1.2.6 to address a resource bug with nodes, and

### Fixed

- Fixed errors in the Linux GUI install script, which impacted only Linux users.
- Fixed errors in the Linux GUI install script, which impacted only Linux users.

## 1.2.4 Chia blockchain 2021-08-26

@@ -1 +1 @@
Subproject commit 0ef042ac3aa8cb6f1181f2ed70776c173fe17707
Subproject commit ec64bddae3c286a54c5d54936b7a845dea544817
@@ -47,7 +47,7 @@ def monkey_patch_click() -> None:
@click.option(
    "--keys-root-path", default=DEFAULT_KEYS_ROOT_PATH, help="Keyring file root", type=click.Path(), show_default=True
)
@click.option("--passphrase-file", type=click.File("r"), help="File or descriptor to read the keyring passphase from")
@click.option("--passphrase-file", type=click.File("r"), help="File or descriptor to read the keyring passphrase from")
@click.pass_context
def cli(
    ctx: click.Context,
@@ -18,6 +18,7 @@ def configure(
    set_outbound_peer_count: str,
    set_peer_count: str,
    testnet: str,
    peer_connect_timeout: str,
):
    config: Dict = load_config(DEFAULT_ROOT_PATH, "config.yaml")
    change_made = False
@@ -145,6 +146,10 @@ def configure(
        else:
            print("Please choose True or False")

    if peer_connect_timeout is not None:
        config["full_node"]["peer_connect_timeout"] = int(peer_connect_timeout)
        change_made = True

    if change_made:
        print("Restart any running chia services for changes to take effect")
        save_config(root_path, "config.yaml", config)
@@ -190,6 +195,7 @@ def configure(
    type=str,
)
@click.option("--set-peer-count", help="Update the target peer count (default 80)", type=str)
@click.option("--set-peer-connect-timeout", help="Update the peer connect timeout (default 30)", type=str)
@click.pass_context
def configure_cmd(
    ctx,
@@ -202,6 +208,7 @@ def configure_cmd(
    set_outbound_peer_count,
    set_peer_count,
    testnet,
    set_peer_connect_timeout,
):
    configure(
        ctx.obj["root_path"],
@@ -214,4 +221,5 @@ def configure_cmd(
        set_outbound_peer_count,
        set_peer_count,
        testnet,
        set_peer_connect_timeout,
    )
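
The new --set-peer-connect-timeout flag only writes an integer into the full_node section of config.yaml and reminds the user to restart services. A minimal sketch of performing the same change programmatically, assuming the load_config/save_config helpers used above and the standard DEFAULT_ROOT_PATH import (the import path and the value 45 are illustrative, not taken from this diff):

# Sketch (assumptions noted above): rough equivalent of `chia configure --set-peer-connect-timeout 45`.
from chia.util.config import load_config, save_config
from chia.util.default_root import DEFAULT_ROOT_PATH  # assumed location of the default root constant

config = load_config(DEFAULT_ROOT_PATH, "config.yaml")
config["full_node"]["peer_connect_timeout"] = 45  # illustrative value; the CLI help lists 30 as the default
save_config(DEFAULT_ROOT_PATH, "config.yaml", config)
print("Restart any running chia services for changes to take effect")
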
@@ -30,8 +30,6 @@ from chia.types.unfinished_block import UnfinishedBlock
from chia.util import cached_bls
from chia.util.condition_tools import (
    pkm_pairs_for_conditions_dict,
    coin_announcements_names_for_npc,
    puzzle_announcements_names_for_npc,
)
from chia.util.errors import Err
from chia.util.generator_tools import (
@@ -159,8 +157,6 @@ async def validate_block_body(
    removals: List[bytes32] = []
    coinbase_additions: List[Coin] = list(expected_reward_coins)
    additions: List[Coin] = []
    coin_announcement_names: Set[bytes32] = set()
    puzzle_announcement_names: Set[bytes32] = set()
    npc_list: List[NPC] = []
    removals_puzzle_dic: Dict[bytes32, bytes32] = {}
    cost: uint64 = uint64(0)
@@ -223,8 +219,6 @@ async def validate_block_body(
            removals_puzzle_dic[npc.coin_name] = npc.puzzle_hash

        additions = additions_for_npc(npc_list)
        coin_announcement_names = coin_announcements_names_for_npc(npc_list)
        puzzle_announcement_names = puzzle_announcements_names_for_npc(npc_list)
    else:
        assert npc_result is None

@@ -325,7 +319,6 @@ async def validate_block_body(
                    min(constants.MAX_BLOCK_COST_CLVM, curr.transactions_info.cost),
                    cost_per_byte=constants.COST_PER_BYTE,
                    safe_mode=False,
                    rust_checker=curr.height > constants.RUST_CONDITION_CHECKER,
                )
                removals_in_curr, additions_in_curr = tx_removals_and_additions(curr_npc_result.npc_list)
            else:
@@ -450,8 +443,6 @@ async def validate_block_body(
            unspent = removal_coin_records[npc.coin_name]
            error = mempool_check_conditions_dict(
                unspent,
                coin_announcement_names,
                puzzle_announcement_names,
                npc.condition_dict,
                prev_transaction_block_height,
                block.foliage_transaction_block.timestamp,
@@ -131,7 +131,6 @@ def create_foliage(
            constants.MAX_BLOCK_COST_CLVM,
            cost_per_byte=constants.COST_PER_BYTE,
            safe_mode=True,
            rust_checker=height > constants.RUST_CONDITION_CHECKER,
        )
        cost = calculate_cost_of_program(block_generator.program, result, constants.COST_PER_BYTE)

@@ -164,7 +164,7 @@ class Blockchain(BlockchainInterface):
        block: FullBlock,
        pre_validation_result: Optional[PreValidationResult] = None,
        fork_point_with_peak: Optional[uint32] = None,
    ) -> Tuple[ReceiveBlockResult, Optional[Err], Optional[uint32]]:
    ) -> Tuple[ReceiveBlockResult, Optional[Err], Optional[uint32], List[CoinRecord]]:
        """
        This method must be called under the blockchain lock
        Adds a new block into the blockchain, if it's valid and connected to the current
@@ -174,17 +174,18 @@ class Blockchain(BlockchainInterface):
        """
        genesis: bool = block.height == 0
        if self.contains_block(block.header_hash):
            return ReceiveBlockResult.ALREADY_HAVE_BLOCK, None, None
            return ReceiveBlockResult.ALREADY_HAVE_BLOCK, None, None, []

        if not self.contains_block(block.prev_header_hash) and not genesis:
            return (
                ReceiveBlockResult.DISCONNECTED_BLOCK,
                Err.INVALID_PREV_BLOCK_HASH,
                None,
                [],
            )

        if not genesis and (self.block_record(block.prev_header_hash).height + 1) != block.height:
            return ReceiveBlockResult.INVALID_BLOCK, Err.INVALID_HEIGHT, None
            return ReceiveBlockResult.INVALID_BLOCK, Err.INVALID_HEIGHT, None, []

        npc_result: Optional[NPCResult] = None
        if pre_validation_result is None:
@@ -201,14 +202,13 @@ class Blockchain(BlockchainInterface):
                try:
                    block_generator: Optional[BlockGenerator] = await self.get_block_generator(block)
                except ValueError:
                    return ReceiveBlockResult.INVALID_BLOCK, Err.GENERATOR_REF_HAS_NO_GENERATOR, None
                    return ReceiveBlockResult.INVALID_BLOCK, Err.GENERATOR_REF_HAS_NO_GENERATOR, None, []
                assert block_generator is not None and block.transactions_info is not None
                npc_result = get_name_puzzle_conditions(
                    block_generator,
                    min(self.constants.MAX_BLOCK_COST_CLVM, block.transactions_info.cost),
                    cost_per_byte=self.constants.COST_PER_BYTE,
                    safe_mode=False,
                    rust_checker=block.height > self.constants.RUST_CONDITION_CHECKER,
                )
                removals, tx_additions = tx_removals_and_additions(npc_result.npc_list)
            else:
@@ -228,7 +228,7 @@ class Blockchain(BlockchainInterface):
            )

            if error is not None:
                return ReceiveBlockResult.INVALID_BLOCK, error.code, None
                return ReceiveBlockResult.INVALID_BLOCK, error.code, None, []
        else:
            npc_result = pre_validation_result.npc_result
            required_iters = pre_validation_result.required_iters
@@ -247,7 +247,7 @@ class Blockchain(BlockchainInterface):
            self.get_block_generator,
        )
        if error_code is not None:
            return ReceiveBlockResult.INVALID_BLOCK, error_code, None
            return ReceiveBlockResult.INVALID_BLOCK, error_code, None, []

        block_record = block_to_block_record(
            self.constants,
@@ -263,7 +263,7 @@ class Blockchain(BlockchainInterface):
            # Perform the DB operations to update the state, and rollback if something goes wrong
            await self.block_store.db_wrapper.begin_transaction()
            await self.block_store.add_full_block(header_hash, block, block_record)
            fork_height, peak_height, records = await self._reconsider_peak(
            fork_height, peak_height, records, coin_record_change = await self._reconsider_peak(
                block_record, genesis, fork_point_with_peak, npc_result
            )
            await self.block_store.db_wrapper.commit_transaction()
@@ -282,10 +282,13 @@ class Blockchain(BlockchainInterface):
            self.block_store.rollback_cache_block(header_hash)
            await self.block_store.db_wrapper.rollback_transaction()
            raise

        if fork_height is not None:
            return ReceiveBlockResult.NEW_PEAK, None, fork_height
            # new coin records added
            assert coin_record_change is not None
            return ReceiveBlockResult.NEW_PEAK, None, fork_height, coin_record_change
        else:
            return ReceiveBlockResult.ADDED_AS_ORPHAN, None, None
            return ReceiveBlockResult.ADDED_AS_ORPHAN, None, None, []

    async def _reconsider_peak(
        self,
@@ -293,7 +296,7 @@ class Blockchain(BlockchainInterface):
        genesis: bool,
        fork_point_with_peak: Optional[uint32],
        npc_result: Optional[NPCResult],
    ) -> Tuple[Optional[uint32], Optional[uint32], List[BlockRecord]]:
    ) -> Tuple[Optional[uint32], Optional[uint32], List[BlockRecord], List[CoinRecord]]:
        """
        When a new block is added, this is called, to check if the new block is the new peak of the chain.
        This also handles reorgs by reverting blocks which are not in the heaviest chain.
@@ -301,6 +304,7 @@ class Blockchain(BlockchainInterface):
        None if there was no update to the heaviest chain.
        """
        peak = self.get_peak()
        lastest_coin_state: Dict[bytes32, CoinRecord] = {}
        if genesis:
            if peak is None:
                block: Optional[FullBlock] = await self.block_store.get_full_block(block_record.header_hash)
@@ -310,10 +314,20 @@ class Blockchain(BlockchainInterface):
                    tx_removals, tx_additions = tx_removals_and_additions(npc_result.npc_list)
                else:
                    tx_removals, tx_additions = [], []
                await self.coin_store.new_block(block, tx_additions, tx_removals)
                if block.is_transaction_block():
                    assert block.foliage_transaction_block is not None
                    added, _ = await self.coin_store.new_block(
                        block.height,
                        block.foliage_transaction_block.timestamp,
                        block.get_included_reward_coins(),
                        tx_additions,
                        tx_removals,
                    )
                else:
                    added, _ = [], []
                await self.block_store.set_peak(block_record.header_hash)
                return uint32(0), uint32(0), [block_record]
            return None, None, []
                return uint32(0), uint32(0), [block_record], added
            return None, None, [], []

        assert peak is not None
        if block_record.weight > peak.weight:
@@ -327,7 +341,10 @@ class Blockchain(BlockchainInterface):
            fork_height = find_fork_point_in_chain(self, block_record, peak)

            if block_record.prev_hash != peak.header_hash:
                await self.coin_store.rollback_to_block(fork_height)
                roll_changes: List[CoinRecord] = await self.coin_store.rollback_to_block(fork_height)
                for coin_record in roll_changes:
                    lastest_coin_state[coin_record.name] = coin_record

            # Rollback sub_epoch_summaries
            heights_to_delete = []
            for ses_included_height in self.__sub_epoch_summaries.keys():
@@ -362,14 +379,29 @@ class Blockchain(BlockchainInterface):
                    )
                else:
                    tx_removals, tx_additions = await self.get_tx_removals_and_additions(fetched_full_block, None)
                await self.coin_store.new_block(fetched_full_block, tx_additions, tx_removals)
                if fetched_full_block.is_transaction_block():
                    assert fetched_full_block.foliage_transaction_block is not None
                    removed_rec, added_rec = await self.coin_store.new_block(
                        fetched_full_block.height,
                        fetched_full_block.foliage_transaction_block.timestamp,
                        fetched_full_block.get_included_reward_coins(),
                        tx_additions,
                        tx_removals,
                    )

                    # Set additions first, than removals in order to handle ephemeral coin state
                    # Add in height order is also required
                    for record in added_rec:
                        lastest_coin_state[record.name] = record
                    for record in removed_rec:
                        lastest_coin_state[record.name] = record

            # Changes the peak to be the new peak
            await self.block_store.set_peak(block_record.header_hash)
            return uint32(max(fork_height, 0)), block_record.height, records_to_add
            return uint32(max(fork_height, 0)), block_record.height, records_to_add, list(lastest_coin_state.values())

        # This is not a heavier block than the heaviest we have seen, so we don't change the coin set
        return None, None, []
        return None, None, [], list(lastest_coin_state.values())

    async def get_tx_removals_and_additions(
        self, block: FullBlock, npc_result: Optional[NPCResult] = None
@@ -384,7 +416,6 @@ class Blockchain(BlockchainInterface):
                self.constants.MAX_BLOCK_COST_CLVM,
                cost_per_byte=self.constants.COST_PER_BYTE,
                safe_mode=False,
                rust_checker=block.height > self.constants.RUST_CONDITION_CHECKER,
            )
            tx_removals, tx_additions = tx_removals_and_additions(npc_result.npc_list)
            return tx_removals, tx_additions
@@ -537,7 +568,6 @@ class Blockchain(BlockchainInterface):
                min(self.constants.MAX_BLOCK_COST_CLVM, block.transactions_info.cost),
                cost_per_byte=self.constants.COST_PER_BYTE,
                safe_mode=False,
                rust_checker=uint32(prev_height + 1) > self.constants.RUST_CONDITION_CHECKER,
            )
            error_code, cost_result = await validate_block_body(
                self.constants,
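
With this change, Blockchain.receive_block and _reconsider_peak return a fourth element: the list of CoinRecords whose state changed when the block was added (new additions, spends, and records touched by a rollback). A hedged sketch of what a caller now does with that tuple, modeled on the respond_block call site shown later in full_node.py (the surrounding objects are assumed to be in scope; this is not the full node's exact code):

# Sketch: unpacking the new 4-tuple from Blockchain.receive_block.
# `blockchain`, `full_node`, `block`, `pre_validation_result` and `peer` are assumed to exist.
added, error_code, fork_height, coin_changes = await blockchain.receive_block(block, pre_validation_result, None)
if added == ReceiveBlockResult.NEW_PEAK:
    new_peak = blockchain.get_peak()
    assert new_peak is not None and fork_height is not None
    # The changed CoinRecords ride along so subscribed wallets can be notified of new coin state.
    await full_node.peak_post_processing(block, new_peak, fork_height, peer, coin_changes)
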
@@ -54,7 +54,6 @@ class ConsensusConstants:
    WEIGHT_PROOF_THRESHOLD: uint8
    WEIGHT_PROOF_RECENT_BLOCKS: uint32
    MAX_BLOCK_COUNT_PER_REQUESTS: uint32
    RUST_CONDITION_CHECKER: uint64
    BLOCKS_CACHE_SIZE: uint32
    NETWORK_TYPE: int
    MAX_GENERATOR_SIZE: uint32
@@ -50,7 +50,6 @@ testnet_kwargs = {
    "BLOCKS_CACHE_SIZE": 4608 + (128 * 4),
    "WEIGHT_PROOF_RECENT_BLOCKS": 1000,
    "MAX_BLOCK_COUNT_PER_REQUESTS": 32,  # Allow up to 32 blocks per request
    "RUST_CONDITION_CHECKER": 730000 + 138000,
    "NETWORK_TYPE": 0,
    "MAX_GENERATOR_SIZE": 1000000,
    "MAX_GENERATOR_REF_LIST_SIZE": 512,  # Number of references allowed in the block generator ref list
@@ -82,7 +82,6 @@ def batch_pre_validate_blocks(
                    min(constants.MAX_BLOCK_COST_CLVM, block.transactions_info.cost),
                    cost_per_byte=constants.COST_PER_BYTE,
                    safe_mode=True,
                    rust_checker=block.height > constants.RUST_CONDITION_CHECKER,
                )
                removals, tx_additions = tx_removals_and_additions(npc_result.npc_list)

@@ -27,7 +27,7 @@ from chia.util.config import load_config
from chia.util.json_util import dict_to_json_str
from chia.util.keychain import (
    Keychain,
    KeyringCurrentPassphaseIsInvalid,
    KeyringCurrentPassphraseIsInvalid,
    KeyringRequiresMigration,
    passphrase_requirements,
    supports_keyring_passphrase,
@@ -141,6 +141,7 @@ class WebSocketServer:
        self.net_config = load_config(root_path, "config.yaml")
        self.self_hostname = self.net_config["self_hostname"]
        self.daemon_port = self.net_config["daemon_port"]
        self.daemon_max_message_size = self.net_config.get("daemon_max_message_size", 50 * 1000 * 1000)
        self.websocket_server = None
        self.ssl_context = ssl_context_for_server(ca_crt_path, ca_key_path, crt_path, key_path, log=self.log)
        self.shut_down = False
@@ -163,7 +164,7 @@ class WebSocketServer:
            self.safe_handle,
            self.self_hostname,
            self.daemon_port,
            max_size=50 * 1000 * 1000,
            max_size=self.daemon_max_message_size,
            ping_interval=500,
            ping_timeout=300,
            ssl=self.ssl_context,
@@ -467,7 +468,7 @@ class WebSocketServer:
            Keychain.set_master_passphrase(current_passphrase, new_passphrase, allow_migration=False)
        except KeyringRequiresMigration:
            error = "keyring requires migration"
        except KeyringCurrentPassphaseIsInvalid:
        except KeyringCurrentPassphraseIsInvalid:
            error = "current passphrase is invalid"
        except Exception as e:
            tb = traceback.format_exc()
@@ -494,7 +495,7 @@ class WebSocketServer:

        try:
            Keychain.remove_master_passphrase(current_passphrase)
        except KeyringCurrentPassphaseIsInvalid:
        except KeyringCurrentPassphraseIsInvalid:
            error = "current passphrase is invalid"
        except Exception as e:
            tb = traceback.format_exc()
@@ -808,8 +809,10 @@ class WebSocketServer:
            }
            return response

        ids: List[str] = []
        for k in range(count):
            id = str(uuid.uuid4())
            ids.append(id)
            config = {
                "id": id,
                "size": size,
@@ -830,7 +833,7 @@ class WebSocketServer:
            # notify GUI about new plot queue item
            self.state_changed(service_plotter, self.prepare_plot_state_message(PlotEvent.STATE_CHANGED, id))

            # only first item can start when user selected serial plotting
            # only the first item can start when user selected serial plotting
            can_start_serial_plotting = k == 0 and self._is_serial_plotting_running(queue) is False

            if parallel is True or can_start_serial_plotting:
@@ -842,6 +845,7 @@ class WebSocketServer:

        response = {
            "success": True,
            "ids": ids,
            "service_name": service_name,
        }

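
The daemon's websocket message-size cap is now configurable: WebSocketServer reads daemon_max_message_size from config.yaml and falls back to the previous hard-coded 50 MB. A small illustrative sketch of that lookup (the dict below stands in for the loaded net_config, and the override value is made up):

# Sketch: how the daemon resolves its websocket max_size after this change.
net_config = {
    "self_hostname": "localhost",
    "daemon_port": 55400,
    "daemon_max_message_size": 100 * 1000 * 1000,  # hypothetical override in config.yaml
}
# Same pattern as WebSocketServer.__init__: configured value if present, else the old 50 MB default.
daemon_max_message_size = net_config.get("daemon_max_message_size", 50 * 1000 * 1000)
print(daemon_max_message_size)  # 100000000 with the override, 50000000 without it
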
@@ -28,8 +28,6 @@ class BlockStore:
        # All full blocks which have been added to the blockchain. Header_hash -> block
        self.db_wrapper = db_wrapper
        self.db = db_wrapper.db
        await self.db.execute("pragma journal_mode=wal")
        await self.db.execute("pragma synchronous=2")
        await self.db.execute(
            "CREATE TABLE IF NOT EXISTS full_blocks(header_hash text PRIMARY KEY, height bigint,"
            " is_block tinyint, is_fully_compactified tinyint, block blob)"
@@ -1,14 +1,16 @@
from typing import List, Optional

from typing import List, Optional, Set, Tuple, Dict
import aiosqlite

from chia.protocols.wallet_protocol import CoinState
from chia.types.blockchain_format.coin import Coin
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.coin_record import CoinRecord
from chia.types.full_block import FullBlock
from chia.util.db_wrapper import DBWrapper
from chia.util.ints import uint32, uint64
from chia.util.lru_cache import LRUCache
from time import time
import logging

log = logging.getLogger(__name__)


class CoinStore:
@@ -29,8 +31,8 @@ class CoinStore:
        self.cache_size = cache_size
        self.db_wrapper = db_wrapper
        self.coin_record_db = db_wrapper.db
        await self.coin_record_db.execute("pragma journal_mode=wal")
        await self.coin_record_db.execute("pragma synchronous=2")
        # the coin_name is unique in this table because the CoinStore always
        # only represent a single peak
        await self.coin_record_db.execute(
            (
                "CREATE TABLE IF NOT EXISTS coin_record("
@@ -57,31 +59,42 @@ class CoinStore:

        await self.coin_record_db.execute("CREATE INDEX IF NOT EXISTS coin_puzzle_hash on coin_record(puzzle_hash)")

        await self.coin_record_db.execute("CREATE INDEX IF NOT EXISTS coin_parent_index on coin_record(coin_parent)")

        await self.coin_record_db.commit()
        self.coin_record_cache = LRUCache(cache_size)
        return self

    async def new_block(self, block: FullBlock, tx_additions: List[Coin], tx_removals: List[bytes32]):
    async def new_block(
        self,
        height: uint32,
        timestamp: uint64,
        included_reward_coins: Set[Coin],
        tx_additions: List[Coin],
        tx_removals: List[bytes32],
    ) -> Tuple[List[CoinRecord], List[CoinRecord]]:
        """
        Only called for blocks which are blocks (and thus have rewards and transactions)
        """
        if block.is_transaction_block() is False:
            return None
        assert block.foliage_transaction_block is not None

        start = time()

        added_coin_records = []
        removed_coin_records = []

        for coin in tx_additions:
            record: CoinRecord = CoinRecord(
                coin,
                block.height,
                height,
                uint32(0),
                False,
                False,
                block.foliage_transaction_block.timestamp,
                timestamp,
            )
            added_coin_records.append(record)
            await self._add_coin_record(record, False)

        included_reward_coins = block.get_included_reward_coins()
        if block.height == 0:
        if height == 0:
            assert len(included_reward_coins) == 0
        else:
            assert len(included_reward_coins) >= 2
@@ -89,20 +102,31 @@ class CoinStore:
        for coin in included_reward_coins:
            reward_coin_r: CoinRecord = CoinRecord(
                coin,
                block.height,
                height,
                uint32(0),
                False,
                True,
                block.foliage_transaction_block.timestamp,
                timestamp,
            )
            added_coin_records.append(reward_coin_r)
            await self._add_coin_record(reward_coin_r, False)

        total_amount_spent: int = 0
        for coin_name in tx_removals:
            total_amount_spent += await self._set_spent(coin_name, block.height)

            removed_coin_record = await self._set_spent(coin_name, height)
            total_amount_spent += removed_coin_record.coin.amount
            removed_coin_records.append(removed_coin_record)
        # Sanity check, already checked in block_body_validation
        assert sum([a.amount for a in tx_additions]) <= total_amount_spent
        end = time()
        if end - start > 10:
            log.warning(
                f"It took {end - start:0.2}s to apply {len(tx_additions)} additions and "
                + f"{len(tx_removals)} removals to the coin store. Make sure "
                + "blockchain database is on a fast drive"
            )

        return removed_coin_records, added_coin_records

    # Checks DB and DiffStores for CoinRecord with coin_name and returns it
    async def get_coin_record(self, coin_name: bytes32) -> Optional[CoinRecord]:
@@ -130,6 +154,9 @@ class CoinStore:
        return coins

    async def get_coins_removed_at_height(self, height: uint32) -> List[CoinRecord]:
        # Special case to avoid querying all unspent coins (spent_index=0)
        if height == 0:
            return []
        cursor = await self.coin_record_db.execute("SELECT * from coin_record WHERE spent_index=?", (height,))
        rows = await cursor.fetchall()
        await cursor.close()
@@ -212,13 +239,44 @@ class CoinStore:
            f"{'' if include_spent_coins else 'AND spent=0'}",
            names_db + (start_height, end_height),
        )

        rows = await cursor.fetchall()

        await cursor.close()
        for row in rows:
            coin = Coin(bytes32(bytes.fromhex(row[6])), bytes32(bytes.fromhex(row[5])), uint64.from_bytes(row[7]))
            coins.add(CoinRecord(coin, row[1], row[2], row[3], row[4], row[8]))

        return list(coins)

    async def get_coin_states_by_puzzle_hashes(
        self,
        include_spent_coins: bool,
        puzzle_hashes: List[bytes32],
        start_height: uint32 = uint32(0),
        end_height: uint32 = uint32((2 ** 32) - 1),
    ) -> List[CoinState]:
        if len(puzzle_hashes) == 0:
            return []

        coins = set()
        puzzle_hashes_db = tuple([ph.hex() for ph in puzzle_hashes])
        cursor = await self.coin_record_db.execute(
            f'SELECT * from coin_record WHERE puzzle_hash in ({"?," * (len(puzzle_hashes_db) - 1)}?) '
            f"AND confirmed_index>=? AND confirmed_index<? "
            f"{'' if include_spent_coins else 'AND spent=0'}",
            puzzle_hashes_db + (start_height, end_height),
        )

        rows = await cursor.fetchall()

        await cursor.close()
        for row in rows:
            coin = Coin(bytes32(bytes.fromhex(row[6])), bytes32(bytes.fromhex(row[5])), uint64.from_bytes(row[7]))
            spent_h = None
            if row[3]:
                spent_h = row[2]
            coins.add(CoinState(coin, spent_h, row[1]))

        return list(coins)

    async def get_coin_records_by_parent_ids(
@@ -248,9 +306,40 @@ class CoinStore:
            coins.add(CoinRecord(coin, row[1], row[2], row[3], row[4], row[8]))
        return list(coins)

    async def rollback_to_block(self, block_index: int):
    async def get_coin_state_by_ids(
        self,
        include_spent_coins: bool,
        coin_ids: List[bytes32],
        start_height: uint32 = uint32(0),
        end_height: uint32 = uint32((2 ** 32) - 1),
    ) -> List[CoinState]:
        if len(coin_ids) == 0:
            return []

        coins = set()
        parent_ids_db = tuple([pid.hex() for pid in coin_ids])
        cursor = await self.coin_record_db.execute(
            f'SELECT * from coin_record WHERE coin_name in ({"?," * (len(parent_ids_db) - 1)}?) '
            f"AND confirmed_index>=? AND confirmed_index<? "
            f"{'' if include_spent_coins else 'AND spent=0'}",
            parent_ids_db + (start_height, end_height),
        )

        rows = await cursor.fetchall()

        await cursor.close()
        for row in rows:
            coin = Coin(bytes32(bytes.fromhex(row[6])), bytes32(bytes.fromhex(row[5])), uint64.from_bytes(row[7]))
            spent_h = None
            if row[3]:
                spent_h = row[2]
            coins.add(CoinState(coin, spent_h, row[1]))
        return list(coins)

    async def rollback_to_block(self, block_index: int) -> List[CoinRecord]:
        """
        Note that block_index can be negative, in which case everything is rolled back
        Returns the list of coin records that have been modified
        """
        # Update memory cache
        delete_queue: bytes32 = []
@@ -271,14 +360,38 @@ class CoinStore:
        for coin_name in delete_queue:
            self.coin_record_cache.remove(coin_name)

        coin_changes: Dict[bytes32, CoinRecord] = {}
        cursor_deleted = await self.coin_record_db.execute(
            "SELECT * FROM coin_record WHERE confirmed_index>?", (block_index,)
        )
        rows = await cursor_deleted.fetchall()
        for row in rows:
            coin = Coin(bytes32(bytes.fromhex(row[6])), bytes32(bytes.fromhex(row[5])), uint64.from_bytes(row[7]))
            record = CoinRecord(coin, uint32(0), row[2], row[3], row[4], uint64(0))
            coin_changes[record.name] = record
        await cursor_deleted.close()

        # Delete from storage
        c1 = await self.coin_record_db.execute("DELETE FROM coin_record WHERE confirmed_index>?", (block_index,))
        await c1.close()

        cursor_unspent = await self.coin_record_db.execute(
            "SELECT * FROM coin_record WHERE confirmed_index>?", (block_index,)
        )
        rows = await cursor_unspent.fetchall()
        for row in rows:
            coin = Coin(bytes32(bytes.fromhex(row[6])), bytes32(bytes.fromhex(row[5])), uint64.from_bytes(row[7]))
            record = CoinRecord(coin, row[1], uint32(0), False, row[4], row[8])
            if record.name not in coin_changes:
                coin_changes[record.name] = record
        await cursor_unspent.close()

        c2 = await self.coin_record_db.execute(
            "UPDATE coin_record SET spent_index = 0, spent = 0 WHERE spent_index>?",
            (block_index,),
        )
        await c2.close()
        return list(coin_changes.values())

    # Store CoinRecord in DB and ram cache
    async def _add_coin_record(self, record: CoinRecord, allow_replace: bool) -> None:
@@ -302,7 +415,7 @@ class CoinStore:
        await cursor.close()

    # Update coin_record to be spent in DB
    async def _set_spent(self, coin_name: bytes32, index: uint32) -> uint64:
    async def _set_spent(self, coin_name: bytes32, index: uint32) -> CoinRecord:
        current: Optional[CoinRecord] = await self.get_coin_record(coin_name)
        if current is None:
            raise ValueError(f"Cannot spend a coin that does not exist in db: {coin_name}")
@@ -317,4 +430,4 @@ class CoinStore:
            current.timestamp,
        )  # type: ignore # noqa
        await self._add_coin_record(spent, True)
        return current.coin.amount
        return spent
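
CoinStore.new_block no longer receives a FullBlock: the caller passes the height, timestamp, and reward coins directly, and gets back the removed and added CoinRecords; rollback_to_block likewise now reports every record it deleted or un-spent. A hedged sketch of calling the updated interface, mirroring the _reconsider_peak call sites above (the `coin_store`, `block`, `tx_additions`, `tx_removals` and `fork_height` names are assumed to be in scope):

# Sketch: using the updated CoinStore interface.
if block.is_transaction_block():
    assert block.foliage_transaction_block is not None
    removed_records, added_records = await coin_store.new_block(
        block.height,
        block.foliage_transaction_block.timestamp,
        block.get_included_reward_coins(),
        tx_additions,
        tx_removals,
    )
# On a reorg, the modified records can be folded into the coin-state update sent to wallets.
rolled_back_records = await coin_store.rollback_to_block(fork_height)
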
@@ -36,6 +36,7 @@ from chia.protocols.full_node_protocol import (
    RespondSignagePoint,
)
from chia.protocols.protocol_message_types import ProtocolMessageTypes
from chia.protocols.wallet_protocol import CoinState, CoinStateUpdate
from chia.server.node_discovery import FullNodePeers
from chia.server.outbound_message import Message, NodeType, make_msg
from chia.server.server import ChiaServer
@@ -44,6 +45,7 @@ from chia.types.blockchain_format.pool_target import PoolTarget
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.blockchain_format.sub_epoch_summary import SubEpochSummary
from chia.types.blockchain_format.vdf import CompressibleVDFField, VDFInfo, VDFProof
from chia.types.coin_record import CoinRecord
from chia.types.end_of_slot_bundle import EndOfSubSlotBundle
from chia.types.full_block import FullBlock
from chia.types.header_block import HeaderBlock
@@ -111,6 +113,11 @@ class FullNode:

        db_path_replaced: str = config["database_path"].replace("CHALLENGE", config["selected_network"])
        self.db_path = path_from_root(root_path, db_path_replaced)
        self.coin_subscriptions: Dict[bytes32, Set[bytes32]] = {}  # Puzzle Hash : Set[Peer ID]
        self.ph_subscriptions: Dict[bytes32, Set[bytes32]] = {}  # Puzzle Hash : Set[Peer ID]
        self.peer_coin_ids: Dict[bytes32, Set[bytes32]] = {}  # Peer ID: Set[Coin ids]
        self.peer_puzzle_hash: Dict[bytes32, Set[bytes32]] = {}  # Peer ID: Set[puzzle_hash]
        self.peer_sub_counter: Dict[bytes32, int] = {}  # Peer ID: int (subscription count)
        mkdir(self.db_path.parent)

    def _set_state_changed_callback(self, callback: Callable):
@@ -122,6 +129,8 @@ class FullNode:
        self.new_peak_sem = asyncio.Semaphore(8)
        # create the store (db) and full node instance
        self.connection = await aiosqlite.connect(self.db_path)
        await self.connection.execute("pragma journal_mode=wal")
        await self.connection.execute("pragma synchronous=NORMAL")
        if self.config.get("log_sqlite_cmds", False):
            sql_log_path = path_from_root(self.root_path, "log/sql.log")
            self.log.info(f"logging SQL commands to {sql_log_path}")
@@ -164,7 +173,7 @@ class FullNode:
        peak: Optional[BlockRecord] = self.blockchain.get_peak()
        if peak is not None:
            full_peak = await self.blockchain.get_full_peak()
            await self.peak_post_processing(full_peak, peak, max(peak.height - 1, 0), None)
            await self.peak_post_processing(full_peak, peak, max(peak.height - 1, 0), None, [])
        if self.config["send_uncompact_interval"] != 0:
            sanitize_weight_proof_only = False
            if "sanitize_weight_proof_only" in self.config:
@@ -279,14 +288,16 @@ class FullNode:
            if not response:
                raise ValueError(f"Error short batch syncing, invalid/no response for {height}-{end_height}")
            async with self.blockchain.lock:
                success, advanced_peak, fork_height = await self.receive_block_batch(response.blocks, peer, None)
                success, advanced_peak, fork_height, coin_changes = await self.receive_block_batch(
                    response.blocks, peer, None
                )
                if not success:
                    raise ValueError(f"Error short batch syncing, failed to validate blocks {height}-{end_height}")
                if advanced_peak:
                    peak = self.blockchain.get_peak()
                    peak_fb: Optional[FullBlock] = await self.blockchain.get_full_peak()
                    assert peak is not None and peak_fb is not None and fork_height is not None
                    await self.peak_post_processing(peak_fb, peak, fork_height, peer)
                    await self.peak_post_processing(peak_fb, peak, fork_height, peer, coin_changes)
            self.log.info(f"Added blocks {height}-{end_height}")
        except Exception:
            self.sync_store.batch_syncing.remove(peer.peer_node_id)
@@ -558,6 +569,27 @@ class FullNode:
            self._state_changed("sync_mode")
        if self.sync_store is not None:
            self.sync_store.peer_disconnected(connection.peer_node_id)
        self.remove_subscriptions(connection)

    def remove_subscriptions(self, peer: ws.WSChiaConnection):
        # Remove all ph | coin id subscription for this peer
        node_id = peer.peer_node_id
        if node_id in self.peer_puzzle_hash:
            puzzle_hashes = self.peer_puzzle_hash[node_id]
            for ph in puzzle_hashes:
                if ph in self.ph_subscriptions:
                    if node_id in self.ph_subscriptions[ph]:
                        self.ph_subscriptions[ph].remove(node_id)

        if node_id in self.peer_coin_ids:
            coin_ids = self.peer_coin_ids[node_id]
            for coin_id in coin_ids:
                if coin_id in self.coin_subscriptions:
                    if node_id in self.coin_subscriptions[coin_id]:
                        self.coin_subscriptions[coin_id].remove(node_id)

        if peer.peer_node_id in self.peer_sub_counter:
            self.peer_sub_counter.pop(peer.peer_node_id)

    def _num_needed_peers(self) -> int:
        assert self.server is not None
@@ -779,7 +811,7 @@ class FullNode:
                peer, blocks = res
                start_height = blocks[0].height
                end_height = blocks[-1].height
                success, advanced_peak, _ = await self.receive_block_batch(
                success, advanced_peak, fork_height, coin_states = await self.receive_block_batch(
                    blocks, peer, None if advanced_peak else uint32(fork_point_height), summaries
                )
                if success is False:
@@ -789,6 +821,9 @@ class FullNode:
                    raise ValueError(f"Failed to validate block batch {start_height} to {end_height}")
                self.log.info(f"Added blocks {start_height} to {end_height}")
                await self.send_peak_to_wallets()
                peak = self.blockchain.get_peak()
                if len(coin_states) > 0 and fork_height is not None:
                    await self.update_wallets(peak.height, fork_height, peak.header_hash, coin_states)
                self.blockchain.clean_block_record(end_height - self.constants.BLOCKS_CACHE_SIZE)

        loop = asyncio.get_event_loop()
@@ -823,13 +858,41 @@ class FullNode:
        peers_with_peak: List = [c for c in self.server.all_connections.values() if c.peer_node_id in peer_ids]
        return peers_with_peak

    async def update_wallets(
        self, height: uint32, fork_height: uint32, peak_hash: bytes32, state_update: List[CoinRecord]
    ):
        changes_for_peer: Dict[bytes32, Set[CoinState]] = {}

        for coin_record in state_update:
            if coin_record.name in self.coin_subscriptions:
                subscribed_peers = self.coin_subscriptions[coin_record.name]
                for peer in subscribed_peers:
                    if peer not in changes_for_peer:
                        changes_for_peer[peer] = set()
                    changes_for_peer[peer].add(coin_record.coin_state)

            if coin_record.coin.puzzle_hash in self.ph_subscriptions:
                subscribed_peers = self.ph_subscriptions[coin_record.coin.puzzle_hash]
                for peer in subscribed_peers:
                    if peer not in changes_for_peer:
                        changes_for_peer[peer] = set()
                    changes_for_peer[peer].add(coin_record.coin_state)

        for peer, changes in changes_for_peer.items():
            if peer not in self.server.all_connections:
                continue
            ws_peer: ws.WSChiaConnection = self.server.all_connections[peer]
            state = CoinStateUpdate(height, fork_height, peak_hash, list(changes))
            msg = make_msg(ProtocolMessageTypes.coin_state_update, state)
            await ws_peer.send_message(msg)

    async def receive_block_batch(
        self,
        all_blocks: List[FullBlock],
        peer: ws.WSChiaConnection,
        fork_point: Optional[uint32],
        wp_summaries: Optional[List[SubEpochSummary]] = None,
    ) -> Tuple[bool, bool, Optional[uint32]]:
    ) -> Tuple[bool, bool, Optional[uint32], List[CoinRecord]]:
        advanced_peak = False
        fork_height: Optional[uint32] = uint32(0)

@@ -839,7 +902,7 @@ class FullNode:
                blocks_to_validate = all_blocks[i:]
                break
        if len(blocks_to_validate) == 0:
            return True, False, fork_height
            return True, False, fork_height, []

        pre_validate_start = time.time()
        pre_validation_results: Optional[
@@ -847,17 +910,17 @@ class FullNode:
        ] = await self.blockchain.pre_validate_blocks_multiprocessing(blocks_to_validate, {}, wp_summaries=wp_summaries)
        self.log.debug(f"Block pre-validation time: {time.time() - pre_validate_start}")
        if pre_validation_results is None:
            return False, False, None
            return False, False, None, []
        for i, block in enumerate(blocks_to_validate):
            if pre_validation_results[i].error is not None:
                self.log.error(
                    f"Invalid block from peer: {peer.get_peer_logging()} {Err(pre_validation_results[i].error)}"
                )
                return False, advanced_peak, fork_height
                return False, advanced_peak, fork_height, []

        for i, block in enumerate(blocks_to_validate):
            assert pre_validation_results[i].required_iters is not None
            (result, error, fork_height,) = await self.blockchain.receive_block(
            result, error, fork_height, coin_changes = await self.blockchain.receive_block(
                block, pre_validation_results[i], None if advanced_peak else fork_point
            )
            if result == ReceiveBlockResult.NEW_PEAK:
@@ -865,7 +928,7 @@ class FullNode:
            elif result == ReceiveBlockResult.INVALID_BLOCK or result == ReceiveBlockResult.DISCONNECTED_BLOCK:
                if error is not None:
                    self.log.error(f"Error: {error}, Invalid block from peer: {peer.get_peer_logging()} ")
                return False, advanced_peak, fork_height
                return False, advanced_peak, fork_height, []
            block_record = self.blockchain.block_record(block.header_hash)
            if block_record.sub_epoch_summary_included is not None:
                if self.weight_proof_handler is not None:
@@ -876,7 +939,7 @@ class FullNode:
            f"Total time for {len(blocks_to_validate)} blocks: {time.time() - pre_validate_start}, "
            f"advanced: {advanced_peak}"
        )
        return True, advanced_peak, fork_height
        return True, advanced_peak, fork_height, coin_changes

    async def _finish_sync(self):
        """
@@ -896,7 +959,7 @@ class FullNode:

            peak_fb: FullBlock = await self.blockchain.get_full_peak()
            if peak is not None:
                await self.peak_post_processing(peak_fb, peak, max(peak.height - 1, 0), None)
                await self.peak_post_processing(peak_fb, peak, max(peak.height - 1, 0), None, [])

        if peak is not None and self.weight_proof_handler is not None:
            await self.weight_proof_handler.get_proof_of_weight(peak.header_hash)
@@ -974,7 +1037,12 @@ class FullNode:
            await self.server.send_to_all([msg], NodeType.FARMER)

    async def peak_post_processing(
        self, block: FullBlock, record: BlockRecord, fork_height: uint32, peer: Optional[ws.WSChiaConnection]
        self,
        block: FullBlock,
        record: BlockRecord,
        fork_height: uint32,
        peer: Optional[ws.WSChiaConnection],
        coin_changes: List[CoinRecord],
    ):
        """
        Must be called under self.blockchain.lock. This updates the internal state of the full node with the
@@ -1109,6 +1177,7 @@ class FullNode:
                    fork_height,
                ),
            )
            await self.update_wallets(record.height, fork_height, record.header_hash, coin_changes)
            await self.server.send_to_all([msg], NodeType.WALLET)

        # Check if we detected a spent transaction, to load up our generator cache
@@ -1186,7 +1255,7 @@ class FullNode:
            )
            # This recursion ends here, we cannot recurse again because transactions_generator is not None
            return await self.respond_block(block_response, peer)

        coin_changes: List[CoinRecord] = []
        async with self.blockchain.lock:
            # After acquiring the lock, check again, because another asyncio thread might have added it
            if self.blockchain.contains_block(header_hash):
@@ -1214,7 +1283,10 @@ class FullNode:
                pre_validation_results[0] if pre_validation_result is None else pre_validation_result
            )
            assert result_to_validate.required_iters == pre_validation_results[0].required_iters
            added, error_code, fork_height = await self.blockchain.receive_block(block, result_to_validate, None)
            added, error_code, fork_height, coin_changes = await self.blockchain.receive_block(
                block, result_to_validate, None
            )

            if (
                self.full_node_store.previous_generator is not None
                and fork_height is not None
@@ -1238,7 +1310,7 @@ class FullNode:
                new_peak: Optional[BlockRecord] = self.blockchain.get_peak()
                assert new_peak is not None and fork_height is not None

                await self.peak_post_processing(block, new_peak, fork_height, peer)
                await self.peak_post_processing(block, new_peak, fork_height, peer, coin_changes)

            elif added == ReceiveBlockResult.ADDED_AS_ORPHAN:
                self.log.info(
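
The subscription bookkeeping added to FullNode is a pair of many-to-many maps (puzzle hash or coin id to subscribed peer ids) plus reverse maps per peer, so update_wallets can find which peers should receive a CoinStateUpdate and remove_subscriptions can drop everything for a disconnecting peer. A self-contained toy sketch of that flow with plain data (the peer ids and puzzle hashes are made up; only the shape of the maps comes from this diff):

# Toy sketch of the FullNode subscription maps (illustrative values only).
from typing import Dict, Set

ph_subscriptions: Dict[bytes, Set[bytes]] = {}  # puzzle_hash -> set of peer ids
peer_puzzle_hash: Dict[bytes, Set[bytes]] = {}  # peer id -> set of puzzle hashes


def subscribe(peer_id: bytes, puzzle_hash: bytes) -> None:
    ph_subscriptions.setdefault(puzzle_hash, set()).add(peer_id)
    peer_puzzle_hash.setdefault(peer_id, set()).add(puzzle_hash)


def peers_to_notify(changed_puzzle_hash: bytes) -> Set[bytes]:
    # update_wallets performs this lookup per changed CoinRecord before building a CoinStateUpdate.
    return ph_subscriptions.get(changed_puzzle_hash, set())


def remove_subscriptions(peer_id: bytes) -> None:
    # Mirrors FullNode.remove_subscriptions: drop the peer from every puzzle hash it followed.
    for ph in peer_puzzle_hash.pop(peer_id, set()):
        ph_subscriptions.get(ph, set()).discard(peer_id)


subscribe(b"peer-1", b"ph-a")
assert peers_to_notify(b"ph-a") == {b"peer-1"}
remove_subscriptions(b"peer-1")
assert peers_to_notify(b"ph-a") == set()
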
@ -18,7 +18,7 @@ from chia.full_node.signage_point import SignagePoint
|
||||
from chia.protocols import farmer_protocol, full_node_protocol, introducer_protocol, timelord_protocol, wallet_protocol
|
||||
from chia.protocols.full_node_protocol import RejectBlock, RejectBlocks
|
||||
from chia.protocols.protocol_message_types import ProtocolMessageTypes
|
||||
from chia.protocols.wallet_protocol import PuzzleSolutionResponse, RejectHeaderBlocks, RejectHeaderRequest
|
||||
from chia.protocols.wallet_protocol import PuzzleSolutionResponse, RejectHeaderBlocks, RejectHeaderRequest, CoinState
|
||||
from chia.server.outbound_message import Message, make_msg
|
||||
from chia.types.blockchain_format.coin import Coin, hash_coin_list
|
||||
from chia.types.blockchain_format.pool_target import PoolTarget
|
||||
@ -32,7 +32,7 @@ from chia.types.mempool_inclusion_status import MempoolInclusionStatus
|
||||
from chia.types.mempool_item import MempoolItem
|
||||
from chia.types.peer_info import PeerInfo
|
||||
from chia.types.unfinished_block import UnfinishedBlock
|
||||
from chia.util.api_decorators import api_request, peer_required, bytes_required, execute_task
|
||||
from chia.util.api_decorators import api_request, peer_required, bytes_required, execute_task, reply_type
|
||||
from chia.util.generator_tools import get_block_header
|
||||
from chia.util.hash import std_hash
|
||||
from chia.util.ints import uint8, uint32, uint64, uint128
|
||||
@ -62,6 +62,7 @@ class FullNodeAPI:
|
||||
|
||||
@peer_required
|
||||
@api_request
|
||||
@reply_type([ProtocolMessageTypes.respond_peers])
|
||||
async def request_peers(self, _request: full_node_protocol.RequestPeers, peer: ws.WSChiaConnection):
|
||||
if peer.peer_server_port is None:
|
||||
return None
|
||||
@ -189,6 +190,7 @@ class FullNodeAPI:
|
||||
return None
|
||||
|
||||
@api_request
|
||||
@reply_type([ProtocolMessageTypes.respond_transaction])
|
||||
async def request_transaction(self, request: full_node_protocol.RequestTransaction) -> Optional[Message]:
|
||||
"""Peer has requested a full transaction from us."""
|
||||
# Ignore if syncing
|
||||
@ -227,6 +229,7 @@ class FullNodeAPI:
|
||||
return None
|
||||
|
||||
@api_request
|
||||
@reply_type([ProtocolMessageTypes.respond_proof_of_weight])
|
||||
async def request_proof_of_weight(self, request: full_node_protocol.RequestProofOfWeight) -> Optional[Message]:
|
||||
if self.full_node.weight_proof_handler is None:
|
||||
return None
|
||||
@ -272,6 +275,7 @@ class FullNodeAPI:
|
||||
return None
|
||||
|
||||
@api_request
|
||||
@reply_type([ProtocolMessageTypes.respond_block, ProtocolMessageTypes.reject_block])
|
||||
async def request_block(self, request: full_node_protocol.RequestBlock) -> Optional[Message]:
|
||||
if not self.full_node.blockchain.contains_height(request.height):
|
||||
reject = RejectBlock(request.height)
|
||||
@ -288,6 +292,7 @@ class FullNodeAPI:
|
||||
return msg
|
||||
|
||||
@api_request
|
||||
@reply_type([ProtocolMessageTypes.respond_blocks, ProtocolMessageTypes.reject_blocks])
|
||||
async def request_blocks(self, request: full_node_protocol.RequestBlocks) -> Optional[Message]:
|
||||
if request.end_height < request.start_height or request.end_height - request.start_height > 32:
|
||||
reject = RejectBlocks(request.start_height, request.end_height)
|
||||
@ -399,6 +404,7 @@ class FullNodeAPI:
|
||||
return msg
|
||||
|
||||
@api_request
|
||||
@reply_type([ProtocolMessageTypes.respond_unfinished_block])
|
||||
async def request_unfinished_block(
|
||||
self, request_unfinished_block: full_node_protocol.RequestUnfinishedBlock
|
||||
) -> Optional[Message]:
|
||||
@ -509,6 +515,7 @@ class FullNodeAPI:
|
||||
return make_msg(ProtocolMessageTypes.request_signage_point_or_end_of_sub_slot, full_node_request)
|
||||
|
||||
@api_request
|
||||
@reply_type([ProtocolMessageTypes.respond_signage_point, ProtocolMessageTypes.respond_end_of_sub_slot])
|
||||
async def request_signage_point_or_end_of_sub_slot(
|
||||
self, request: full_node_protocol.RequestSignagePointOrEndOfSubSlot
|
||||
) -> Optional[Message]:
|
||||
@ -1300,6 +1307,7 @@ class FullNodeAPI:
|
||||
|
||||
@peer_required
|
||||
@api_request
|
||||
@reply_type([ProtocolMessageTypes.respond_compact_vdf])
|
||||
async def request_compact_vdf(self, request: full_node_protocol.RequestCompactVDF, peer: ws.WSChiaConnection):
|
||||
if self.full_node.sync_store.get_sync_mode():
|
||||
return None
|
||||
@ -1311,3 +1319,75 @@ class FullNodeAPI:
|
||||
if self.full_node.sync_store.get_sync_mode():
|
||||
return None
|
||||
await self.full_node.respond_compact_vdf(request, peer)
|
||||
|
||||
@peer_required
|
||||
@api_request
|
||||
async def register_interest_in_puzzle_hash(
|
||||
self, request: wallet_protocol.RegisterForPhUpdates, peer: ws.WSChiaConnection
|
||||
):
|
||||
if peer.peer_node_id not in self.full_node.peer_puzzle_hash:
|
||||
self.full_node.peer_puzzle_hash[peer.peer_node_id] = set()
|
||||
|
||||
if peer.peer_node_id not in self.full_node.peer_sub_counter:
|
||||
self.full_node.peer_sub_counter[peer.peer_node_id] = 0
|
||||
|
||||
# Add peer to the "Subscribed" dictionary
|
||||
for puzzle_hash in request.puzzle_hashes:
|
||||
if puzzle_hash not in self.full_node.ph_subscriptions:
|
||||
self.full_node.ph_subscriptions[puzzle_hash] = set()
|
||||
if (
|
||||
peer.peer_node_id not in self.full_node.ph_subscriptions[puzzle_hash]
|
||||
and self.full_node.peer_sub_counter[peer.peer_node_id] < 100000
|
||||
):
|
||||
self.full_node.ph_subscriptions[puzzle_hash].add(peer.peer_node_id)
|
||||
self.full_node.peer_puzzle_hash[peer.peer_node_id].add(puzzle_hash)
|
||||
self.full_node.peer_sub_counter[peer.peer_node_id] += 1
|
||||
|
||||
# Send all coins with requested puzzle hash that have been created after the specified height
|
||||
states: List[CoinState] = await self.full_node.coin_store.get_coin_states_by_puzzle_hashes(
|
||||
include_spent_coins=True, puzzle_hashes=request.puzzle_hashes, start_height=request.min_height
|
||||
)
|
||||
|
||||
response = wallet_protocol.RespondToPhUpdates(request.puzzle_hashes, request.min_height, states)
|
||||
msg = make_msg(ProtocolMessageTypes.respond_to_ph_update, response)
|
||||
return msg
|
||||
|
||||
@peer_required
|
||||
@api_request
|
||||
async def register_interest_in_coin(
|
||||
self, request: wallet_protocol.RegisterForCoinUpdates, peer: ws.WSChiaConnection
|
||||
):
|
||||
if peer.peer_node_id not in self.full_node.peer_coin_ids:
|
||||
self.full_node.peer_coin_ids[peer.peer_node_id] = set()
|
||||
|
||||
if peer.peer_node_id not in self.full_node.peer_sub_counter:
|
||||
self.full_node.peer_sub_counter[peer.peer_node_id] = 0
|
||||
|
||||
for coin_id in request.coin_ids:
|
||||
if coin_id not in self.full_node.coin_subscriptions:
|
||||
self.full_node.coin_subscriptions[coin_id] = set()
|
||||
if (
|
||||
peer.peer_node_id not in self.full_node.coin_subscriptions[coin_id]
|
||||
and self.full_node.peer_sub_counter[peer.peer_node_id] < 100000
|
||||
):
|
||||
self.full_node.coin_subscriptions[coin_id].add(peer.peer_node_id)
|
||||
self.full_node.peer_coin_ids[peer.peer_node_id].add(coin_id)
|
||||
self.full_node.peer_sub_counter[peer.peer_node_id] += 1
|
||||
|
||||
states: List[CoinState] = await self.full_node.coin_store.get_coin_state_by_ids(
|
||||
include_spent_coins=True, coin_ids=request.coin_ids, start_height=request.min_height
|
||||
)
|
||||
|
||||
response = wallet_protocol.RespondToCoinUpdates(request.coin_ids, request.min_height, states)
|
||||
msg = make_msg(ProtocolMessageTypes.respond_to_coin_update, response)
|
||||
return msg
|
||||
|
||||
@api_request
|
||||
async def request_children(self, request: wallet_protocol.RequestChildren) -> Optional[Message]:
|
||||
coin_records: List[CoinRecord] = await self.full_node.coin_store.get_coin_records_by_parent_ids(
|
||||
True, [request.coin_name]
|
||||
)
|
||||
states = [record.coin_state for record in coin_records]
|
||||
response = wallet_protocol.RespondChildren(states)
|
||||
msg = make_msg(ProtocolMessageTypes.respond_children, response)
|
||||
return msg
|
||||
|
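A minimal, self-contained sketch of the per-peer subscription bookkeeping that register_interest_in_puzzle_hash and register_interest_in_coin implement above, including the 100000-subscriptions-per-peer cap. The SubscriptionTracker class and its attribute names are illustrative only, not the full node's actual fields.

from typing import Dict, Set

class SubscriptionTracker:
    def __init__(self, max_subs_per_peer: int = 100000):
        self.max_subs_per_peer = max_subs_per_peer
        self.ph_subscriptions: Dict[bytes, Set[bytes]] = {}  # puzzle hash -> subscribed peer ids
        self.peer_puzzle_hash: Dict[bytes, Set[bytes]] = {}  # peer id -> subscribed puzzle hashes
        self.peer_sub_counter: Dict[bytes, int] = {}         # peer id -> total subscription count

    def register(self, peer_id: bytes, puzzle_hashes: Set[bytes]) -> None:
        self.peer_puzzle_hash.setdefault(peer_id, set())
        self.peer_sub_counter.setdefault(peer_id, 0)
        for ph in puzzle_hashes:
            subs = self.ph_subscriptions.setdefault(ph, set())
            # Count a subscription only once per (peer, puzzle hash) pair, and stop
            # accepting new ones once the per-peer cap is reached.
            if peer_id not in subs and self.peer_sub_counter[peer_id] < self.max_subs_per_peer:
                subs.add(peer_id)
                self.peer_puzzle_hash[peer_id].add(ph)
                self.peer_sub_counter[peer_id] += 1

tracker = SubscriptionTracker()
tracker.register(b"peer-1", {b"\x11" * 32})
assert tracker.peer_sub_counter[b"peer-1"] == 1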
@ -1,22 +1,18 @@
|
||||
import logging
|
||||
import time
|
||||
from typing import Tuple, Dict, List, Optional, Set
|
||||
from clvm import SExp
|
||||
from typing import Dict, List, Optional
|
||||
from clvm_rs import STRICT_MODE
|
||||
|
||||
from chia.consensus.cost_calculator import NPCResult
|
||||
from chia.consensus.condition_costs import ConditionCost
|
||||
from chia.full_node.generator import create_generator_args, setup_generator_args
|
||||
from chia.types.blockchain_format.coin import Coin
|
||||
from chia.types.blockchain_format.program import NIL
|
||||
from chia.types.blockchain_format.sized_bytes import bytes32
|
||||
from chia.types.coin_record import CoinRecord
|
||||
from chia.types.condition_with_args import ConditionWithArgs
|
||||
from chia.types.generator_types import BlockGenerator
|
||||
from chia.types.name_puzzle_condition import NPC
|
||||
from chia.util.clvm import int_from_bytes, int_to_bytes
|
||||
from chia.util.condition_tools import ConditionOpcode, conditions_by_opcode
|
||||
from chia.util.errors import Err, ValidationError
|
||||
from chia.util.clvm import int_from_bytes
|
||||
from chia.util.condition_tools import ConditionOpcode
|
||||
from chia.util.errors import Err
|
||||
from chia.util.ints import uint32, uint64, uint16
|
||||
from chia.wallet.puzzles.generator_loader import GENERATOR_FOR_SINGLE_COIN_MOD
|
||||
from chia.wallet.puzzles.rom_bootstrap_generator import get_generator
|
||||
@ -24,31 +20,9 @@ from chia.wallet.puzzles.rom_bootstrap_generator import get_generator
|
||||
GENERATOR_MOD = get_generator()
|
||||
|
||||
|
||||
def mempool_assert_announcement(condition: ConditionWithArgs, announcements: Set[bytes32]) -> Optional[Err]:
|
||||
"""
|
||||
Check if an announcement is included in the list of announcements
|
||||
"""
|
||||
announcement_hash = bytes32(condition.vars[0])
|
||||
|
||||
if announcement_hash not in announcements:
|
||||
return Err.ASSERT_ANNOUNCE_CONSUMED_FAILED
|
||||
|
||||
return None
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def mempool_assert_my_coin_id(condition: ConditionWithArgs, unspent: CoinRecord) -> Optional[Err]:
|
||||
"""
|
||||
Checks if CoinID matches the id from the condition
|
||||
"""
|
||||
if unspent.coin.name() != condition.vars[0]:
|
||||
log.warning(f"My name: {unspent.coin.name()} got: {condition.vars[0].hex()}")
|
||||
return Err.ASSERT_MY_COIN_ID_FAILED
|
||||
return None
|
||||
|
||||
|
||||
def mempool_assert_absolute_block_height_exceeds(
|
||||
condition: ConditionWithArgs, prev_transaction_block_height: uint32
|
||||
) -> Optional[Err]:
|
||||
@ -114,250 +88,7 @@ def mempool_assert_relative_time_exceeds(
|
||||
return None
|
||||
|
||||
|
||||
def mempool_assert_my_parent_id(condition: ConditionWithArgs, unspent: CoinRecord) -> Optional[Err]:
|
||||
"""
|
||||
Checks if coin's parent ID matches the ID from the condition
|
||||
"""
|
||||
if unspent.coin.parent_coin_info != condition.vars[0]:
|
||||
return Err.ASSERT_MY_PARENT_ID_FAILED
|
||||
return None
|
||||
|
||||
|
||||
def mempool_assert_my_puzzlehash(condition: ConditionWithArgs, unspent: CoinRecord) -> Optional[Err]:
|
||||
"""
|
||||
Checks if coin's puzzlehash matches the puzzlehash from the condition
|
||||
"""
|
||||
if unspent.coin.puzzle_hash != condition.vars[0]:
|
||||
return Err.ASSERT_MY_PUZZLEHASH_FAILED
|
||||
return None
|
||||
|
||||
|
||||
def mempool_assert_my_amount(condition: ConditionWithArgs, unspent: CoinRecord) -> Optional[Err]:
|
||||
"""
|
||||
Checks if coin's amount matches the amount from the condition
|
||||
"""
|
||||
if unspent.coin.amount != int_from_bytes(condition.vars[0]):
|
||||
return Err.ASSERT_MY_AMOUNT_FAILED
|
||||
return None
|
||||
|
||||
|
||||
def sanitize_int(n: SExp, safe_mode: bool) -> int:
|
||||
buf = n.atom
|
||||
if safe_mode and len(buf) > 2 and buf[0] == 0 and buf[1] == 0:
|
||||
raise ValidationError(Err.INVALID_CONDITION)
|
||||
return n.as_int()
|
||||
|
||||
|
||||
def parse_aggsig(args: SExp) -> List[bytes]:
|
||||
pubkey = args.first().atom
|
||||
args = args.rest()
|
||||
message = args.first().atom
|
||||
if len(pubkey) != 48:
|
||||
raise ValidationError(Err.INVALID_CONDITION)
|
||||
if len(message) > 1024:
|
||||
raise ValidationError(Err.INVALID_CONDITION)
|
||||
# agg sig conditions only take 2 parameters
|
||||
args = args.rest()
|
||||
# the list is terminated by having a right-element that's not another pair,
|
||||
# just like as_atom_list() (see chia/types/blockchain_format/program.py)
|
||||
if args.pair is not None:
|
||||
raise ValidationError(Err.INVALID_CONDITION)
|
||||
return [pubkey, message]
|
||||
|
||||
|
||||
def parse_create_coin(args: SExp, safe_mode: bool) -> List[bytes]:
|
||||
puzzle_hash = args.first().atom
|
||||
args = args.rest()
|
||||
if len(puzzle_hash) != 32:
|
||||
raise ValidationError(Err.INVALID_CONDITION)
|
||||
amount_int = sanitize_int(args.first(), safe_mode)
|
||||
if amount_int >= 2 ** 64:
|
||||
raise ValidationError(Err.COIN_AMOUNT_EXCEEDS_MAXIMUM)
|
||||
if amount_int < 0:
|
||||
raise ValidationError(Err.COIN_AMOUNT_NEGATIVE)
|
||||
# note that this may change the representation of amount. If the original
|
||||
# buffer had redundant leading zeroes, they will be stripped
|
||||
return [puzzle_hash, int_to_bytes(amount_int)]
|
||||
|
||||
|
||||
def parse_seconds(args: SExp, safe_mode: bool, error_code: Err) -> Optional[List[bytes]]:
|
||||
seconds_int = sanitize_int(args.first(), safe_mode)
|
||||
# this condition is inherently satisfied, there is no need to keep it
|
||||
if seconds_int <= 0:
|
||||
return None
|
||||
if seconds_int >= 2 ** 64:
|
||||
raise ValidationError(error_code)
|
||||
# note that this may change the representation of seconds. If the original
|
||||
# buffer had redundant leading zeroes, they will be stripped
|
||||
return [int_to_bytes(seconds_int)]
|
||||
|
||||
|
||||
def parse_height(args: SExp, safe_mode: bool, error_code: Err) -> Optional[List[bytes]]:
|
||||
height_int = sanitize_int(args.first(), safe_mode)
|
||||
# this condition is inherently satisfied, there is no need to keep it
|
||||
if height_int < 0:
|
||||
return None
|
||||
if height_int >= 2 ** 32:
|
||||
raise ValidationError(error_code)
|
||||
# note that this may change the representation of the height. If the original
|
||||
# buffer had redundant leading zeroes, they will be stripped
|
||||
return [int_to_bytes(height_int)]
|
||||
|
||||
|
||||
def parse_fee(args: SExp, safe_mode: bool) -> List[bytes]:
|
||||
fee_int = sanitize_int(args.first(), safe_mode)
|
||||
if fee_int >= 2 ** 64 or fee_int < 0:
|
||||
raise ValidationError(Err.RESERVE_FEE_CONDITION_FAILED)
|
||||
# note that this may change the representation of the fee. If the original
|
||||
# buffer had redundant leading zeroes, they will be stripped
|
||||
return [int_to_bytes(fee_int)]
|
||||
|
||||
|
||||
def parse_hash(args: SExp, error_code: Err) -> List[bytes]:
|
||||
h = args.first().atom
|
||||
if len(h) != 32:
|
||||
raise ValidationError(error_code)
|
||||
return [h]
|
||||
|
||||
|
||||
def parse_amount(args: SExp, safe_mode: bool) -> List[bytes]:
|
||||
amount_int = sanitize_int(args.first(), safe_mode)
|
||||
if amount_int < 0:
|
||||
raise ValidationError(Err.ASSERT_MY_AMOUNT_FAILED)
|
||||
if amount_int >= 2 ** 64:
|
||||
raise ValidationError(Err.ASSERT_MY_AMOUNT_FAILED)
|
||||
# note that this may change the representation of amount. If the original
|
||||
# buffer had redundant leading zeroes, they will be stripped
|
||||
return [int_to_bytes(amount_int)]
|
||||
|
||||
|
||||
def parse_announcement(args: SExp) -> List[bytes]:
|
||||
msg = args.first().atom
|
||||
if len(msg) > 1024:
|
||||
raise ValidationError(Err.INVALID_CONDITION)
|
||||
return [msg]
|
||||
|
||||
|
||||
def parse_condition_args(args: SExp, condition: ConditionOpcode, safe_mode: bool) -> Tuple[int, Optional[List[bytes]]]:
|
||||
"""
|
||||
Parse a list with exactly the expected args, given opcode,
|
||||
from an SExp into a list of bytes. If there are fewer or more elements in
|
||||
the list, raise a RuntimeError. If the condition is inherently true (such as
|
||||
a time or height lock with a negative time or height), the returned list is None.
|
||||
"""
|
||||
op = ConditionOpcode
|
||||
cc = ConditionCost
|
||||
if condition is op.AGG_SIG_UNSAFE or condition is op.AGG_SIG_ME:
|
||||
return cc.AGG_SIG.value, parse_aggsig(args)
|
||||
elif condition is op.CREATE_COIN:
|
||||
return cc.CREATE_COIN.value, parse_create_coin(args, safe_mode)
|
||||
elif condition is op.ASSERT_SECONDS_ABSOLUTE:
|
||||
return cc.ASSERT_SECONDS_ABSOLUTE.value, parse_seconds(args, safe_mode, Err.ASSERT_SECONDS_ABSOLUTE_FAILED)
|
||||
elif condition is op.ASSERT_SECONDS_RELATIVE:
|
||||
return cc.ASSERT_SECONDS_RELATIVE.value, parse_seconds(args, safe_mode, Err.ASSERT_SECONDS_RELATIVE_FAILED)
|
||||
elif condition is op.ASSERT_HEIGHT_ABSOLUTE:
|
||||
return cc.ASSERT_HEIGHT_ABSOLUTE.value, parse_height(args, safe_mode, Err.ASSERT_HEIGHT_ABSOLUTE_FAILED)
|
||||
elif condition is op.ASSERT_HEIGHT_RELATIVE:
|
||||
return cc.ASSERT_HEIGHT_RELATIVE.value, parse_height(args, safe_mode, Err.ASSERT_HEIGHT_RELATIVE_FAILED)
|
||||
elif condition is op.ASSERT_MY_COIN_ID:
|
||||
return cc.ASSERT_MY_COIN_ID.value, parse_hash(args, Err.ASSERT_MY_COIN_ID_FAILED)
|
||||
elif condition is op.RESERVE_FEE:
|
||||
return cc.RESERVE_FEE.value, parse_fee(args, safe_mode)
|
||||
elif condition is op.CREATE_COIN_ANNOUNCEMENT:
|
||||
return cc.CREATE_COIN_ANNOUNCEMENT.value, parse_announcement(args)
|
||||
elif condition is op.ASSERT_COIN_ANNOUNCEMENT:
|
||||
return cc.ASSERT_COIN_ANNOUNCEMENT.value, parse_hash(args, Err.ASSERT_ANNOUNCE_CONSUMED_FAILED)
|
||||
elif condition is op.CREATE_PUZZLE_ANNOUNCEMENT:
|
||||
return cc.CREATE_PUZZLE_ANNOUNCEMENT.value, parse_announcement(args)
|
||||
elif condition is op.ASSERT_PUZZLE_ANNOUNCEMENT:
|
||||
return cc.ASSERT_PUZZLE_ANNOUNCEMENT.value, parse_hash(args, Err.ASSERT_ANNOUNCE_CONSUMED_FAILED)
|
||||
elif condition is op.ASSERT_MY_PARENT_ID:
|
||||
return cc.ASSERT_MY_PARENT_ID.value, parse_hash(args, Err.ASSERT_MY_PARENT_ID_FAILED)
|
||||
elif condition is op.ASSERT_MY_PUZZLEHASH:
|
||||
return cc.ASSERT_MY_PUZZLEHASH.value, parse_hash(args, Err.ASSERT_MY_PUZZLEHASH_FAILED)
|
||||
elif condition is op.ASSERT_MY_AMOUNT:
|
||||
return cc.ASSERT_MY_AMOUNT.value, parse_amount(args, safe_mode)
|
||||
else:
|
||||
raise ValidationError(Err.INVALID_CONDITION)
|
||||
|
||||
|
||||
CONDITION_OPCODES: Set[bytes] = set(item.value for item in ConditionOpcode)
|
||||
|
||||
|
||||
def parse_condition(cond: SExp, safe_mode: bool) -> Tuple[int, Optional[ConditionWithArgs]]:
|
||||
condition = cond.first().as_atom()
|
||||
if condition in CONDITION_OPCODES:
|
||||
opcode: ConditionOpcode = ConditionOpcode(condition)
|
||||
cost, args = parse_condition_args(cond.rest(), opcode, safe_mode)
|
||||
cvl = ConditionWithArgs(opcode, args) if args is not None else None
|
||||
elif not safe_mode:
|
||||
# we don't need to save unknown conditions. We can't do anything with them anyway
|
||||
# safe_mode just tells us whether we can tolerate them or not
|
||||
return 0, None
|
||||
else:
|
||||
raise ValidationError(Err.INVALID_CONDITION)
|
||||
return cost, cvl
|
||||
|
||||
|
||||
def get_name_puzzle_conditions_python(
|
||||
generator: BlockGenerator, max_cost: int, *, cost_per_byte: int, safe_mode: bool
|
||||
) -> NPCResult:
|
||||
"""
|
||||
This executes the generator program and returns the coins and their
|
||||
conditions. If the cost of the program (size, CLVM execution and conditions)
|
||||
exceed max_cost, the function fails. In order to accurately take the size
|
||||
of the program into account when calculating cost, cost_per_byte must be
|
||||
specified.
|
||||
safe_mode determines whether the clvm program and conditions are executed in
|
||||
strict mode or not. When in safe/strict mode, unknown operations or conditions
|
||||
are considered failures. This is the mode when accepting transactions into
|
||||
the mempool.
|
||||
"""
|
||||
block_program, block_program_args = setup_generator_args(generator)
|
||||
max_cost -= len(bytes(generator.program)) * cost_per_byte
|
||||
if max_cost < 0:
|
||||
return NPCResult(uint16(Err.INVALID_BLOCK_COST.value), [], uint64(0))
|
||||
if safe_mode:
|
||||
clvm_cost, result = GENERATOR_MOD.run_safe_with_cost(max_cost, block_program, block_program_args)
|
||||
else:
|
||||
clvm_cost, result = GENERATOR_MOD.run_with_cost(max_cost, block_program, block_program_args)
|
||||
|
||||
max_cost -= clvm_cost
|
||||
if max_cost < 0:
|
||||
return NPCResult(uint16(Err.INVALID_BLOCK_COST.value), [], uint64(0))
|
||||
npc_list: List[NPC] = []
|
||||
|
||||
for res in result.first().as_iter():
|
||||
conditions_list: List[ConditionWithArgs] = []
|
||||
|
||||
if len(res.first().atom) != 32:
|
||||
raise ValidationError(Err.INVALID_CONDITION)
|
||||
spent_coin_parent_id: bytes32 = res.first().as_atom()
|
||||
res = res.rest()
|
||||
if len(res.first().atom) != 32:
|
||||
raise ValidationError(Err.INVALID_CONDITION)
|
||||
spent_coin_puzzle_hash: bytes32 = res.first().as_atom()
|
||||
res = res.rest()
|
||||
spent_coin_amount: uint64 = uint64(sanitize_int(res.first(), safe_mode))
|
||||
res = res.rest()
|
||||
spent_coin: Coin = Coin(spent_coin_parent_id, spent_coin_puzzle_hash, spent_coin_amount)
|
||||
|
||||
for cond in res.first().as_iter():
|
||||
cost, cvl = parse_condition(cond, safe_mode)
|
||||
max_cost -= cost
|
||||
if max_cost < 0:
|
||||
return NPCResult(uint16(Err.INVALID_BLOCK_COST.value), [], uint64(0))
|
||||
if cvl is not None:
|
||||
conditions_list.append(cvl)
|
||||
|
||||
conditions_dict = conditions_by_opcode(conditions_list)
|
||||
if conditions_dict is None:
|
||||
conditions_dict = {}
|
||||
npc_list.append(NPC(spent_coin.name(), spent_coin.puzzle_hash, [(a, b) for a, b in conditions_dict.items()]))
|
||||
return NPCResult(None, npc_list, uint64(clvm_cost))
|
||||
|
||||
|
||||
def get_name_puzzle_conditions_rust(
|
||||
def get_name_puzzle_conditions(
|
||||
generator: BlockGenerator, max_cost: int, *, cost_per_byte: int, safe_mode: bool
|
||||
) -> NPCResult:
|
||||
block_program, block_program_args = setup_generator_args(generator)
|
||||
@ -366,47 +97,21 @@ def get_name_puzzle_conditions_rust(
|
||||
return NPCResult(uint16(Err.INVALID_BLOCK_COST.value), [], uint64(0))
|
||||
|
||||
flags = STRICT_MODE if safe_mode else 0
|
||||
err, result, clvm_cost = GENERATOR_MOD.run_as_generator(max_cost, flags, block_program, block_program_args)
|
||||
if err is not None:
|
||||
return NPCResult(uint16(err), [], uint64(0))
|
||||
else:
|
||||
npc_list = []
|
||||
for r in result:
|
||||
conditions = []
|
||||
for c in r.conditions:
|
||||
cwa = []
|
||||
for cond_list in c[1]:
|
||||
cwa.append(ConditionWithArgs(ConditionOpcode(bytes([cond_list.opcode])), cond_list.vars))
|
||||
conditions.append((ConditionOpcode(bytes([c[0]])), cwa))
|
||||
npc_list.append(NPC(r.coin_name, r.puzzle_hash, conditions))
|
||||
return NPCResult(None, npc_list, uint64(clvm_cost))
|
||||
|
||||
|
||||
def get_name_puzzle_conditions(
|
||||
generator: BlockGenerator, max_cost: int, *, cost_per_byte: int, safe_mode: bool, rust_checker: bool
|
||||
) -> NPCResult:
|
||||
"""
|
||||
This executes the generator program and returns the coins and their
|
||||
conditions. If the cost of the program (size, CLVM execution and conditions)
|
||||
exceed max_cost, the function fails. In order to accurately take the size
|
||||
of the program into account when calculating cost, cost_per_byte must be
|
||||
specified.
|
||||
safe_mode determines whether the clvm program and conditions are executed in
|
||||
strict mode or not. When in safe/strict mode, unknown operations or conditions
|
||||
are considered failures. This is the mode when accepting transactions into
|
||||
the mempool.
|
||||
"""
|
||||
try:
|
||||
if rust_checker:
|
||||
return get_name_puzzle_conditions_rust(
|
||||
generator, max_cost, cost_per_byte=cost_per_byte, safe_mode=safe_mode
|
||||
)
|
||||
err, result, clvm_cost = GENERATOR_MOD.run_as_generator(max_cost, flags, block_program, block_program_args)
|
||||
if err is not None:
|
||||
return NPCResult(uint16(err), [], uint64(0))
|
||||
else:
|
||||
return get_name_puzzle_conditions_python(
|
||||
generator, max_cost, cost_per_byte=cost_per_byte, safe_mode=safe_mode
|
||||
)
|
||||
except ValidationError as e:
|
||||
return NPCResult(uint16(e.code.value), [], uint64(0))
|
||||
npc_list = []
|
||||
for r in result:
|
||||
conditions = []
|
||||
for c in r.conditions:
|
||||
cwa = []
|
||||
for cond_list in c[1]:
|
||||
cwa.append(ConditionWithArgs(ConditionOpcode(bytes([cond_list.opcode])), cond_list.vars))
|
||||
conditions.append((ConditionOpcode(bytes([c[0]])), cwa))
|
||||
npc_list.append(NPC(r.coin_name, r.puzzle_hash, conditions))
|
||||
return NPCResult(None, npc_list, uint64(clvm_cost))
|
||||
except Exception as e:
|
||||
log.debug(f"get_name_puzzle_condition failed: {e}")
|
||||
return NPCResult(uint16(Err.GENERATOR_RUNTIME_ERROR.value), [], uint64(0))
|
||||
@ -432,8 +137,6 @@ def get_puzzle_and_solution_for_coin(generator: BlockGenerator, coin_name: bytes
|
||||
|
||||
def mempool_check_conditions_dict(
|
||||
unspent: CoinRecord,
|
||||
coin_announcement_names: Set[bytes32],
|
||||
puzzle_announcement_names: Set[bytes32],
|
||||
conditions_dict: Dict[ConditionOpcode, List[ConditionWithArgs]],
|
||||
prev_transaction_block_height: uint32,
|
||||
timestamp: uint64,
|
||||
@ -445,13 +148,7 @@ def mempool_check_conditions_dict(
|
||||
cvp: ConditionWithArgs
|
||||
for cvp in con_list:
|
||||
error: Optional[Err] = None
|
||||
if cvp.opcode is ConditionOpcode.ASSERT_MY_COIN_ID:
|
||||
error = mempool_assert_my_coin_id(cvp, unspent)
|
||||
elif cvp.opcode is ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT:
|
||||
error = mempool_assert_announcement(cvp, coin_announcement_names)
|
||||
elif cvp.opcode is ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT:
|
||||
error = mempool_assert_announcement(cvp, puzzle_announcement_names)
|
||||
elif cvp.opcode is ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE:
|
||||
if cvp.opcode is ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE:
|
||||
error = mempool_assert_absolute_block_height_exceeds(cvp, prev_transaction_block_height)
|
||||
elif cvp.opcode is ConditionOpcode.ASSERT_HEIGHT_RELATIVE:
|
||||
error = mempool_assert_relative_block_height_exceeds(cvp, unspent, prev_transaction_block_height)
|
||||
@ -459,12 +156,18 @@ def mempool_check_conditions_dict(
|
||||
error = mempool_assert_absolute_time_exceeds(cvp, timestamp)
|
||||
elif cvp.opcode is ConditionOpcode.ASSERT_SECONDS_RELATIVE:
|
||||
error = mempool_assert_relative_time_exceeds(cvp, unspent, timestamp)
|
||||
elif cvp.opcode is ConditionOpcode.ASSERT_MY_COIN_ID:
|
||||
assert False
|
||||
elif cvp.opcode is ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT:
|
||||
assert False
|
||||
elif cvp.opcode is ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT:
|
||||
assert False
|
||||
elif cvp.opcode is ConditionOpcode.ASSERT_MY_PARENT_ID:
|
||||
error = mempool_assert_my_parent_id(cvp, unspent)
|
||||
assert False
|
||||
elif cvp.opcode is ConditionOpcode.ASSERT_MY_PUZZLEHASH:
|
||||
error = mempool_assert_my_puzzlehash(cvp, unspent)
|
||||
assert False
|
||||
elif cvp.opcode is ConditionOpcode.ASSERT_MY_AMOUNT:
|
||||
error = mempool_assert_my_amount(cvp, unspent)
|
||||
assert False
|
||||
if error:
|
||||
return error
|
||||
|
||||
|
@ -29,8 +29,6 @@ from chia.types.spend_bundle import SpendBundle
|
||||
from chia.util.clvm import int_from_bytes
|
||||
from chia.util.condition_tools import (
|
||||
pkm_pairs_for_conditions_dict,
|
||||
coin_announcements_names_for_npc,
|
||||
puzzle_announcements_names_for_npc,
|
||||
)
|
||||
from chia.util.errors import Err
|
||||
from chia.util.generator_tools import additions_for_npc
|
||||
@ -43,9 +41,7 @@ log = logging.getLogger(__name__)
|
||||
def get_npc_multiprocess(spend_bundle_bytes: bytes, max_cost: int, cost_per_byte: int) -> bytes:
|
||||
program = simple_solution_generator(SpendBundle.from_bytes(spend_bundle_bytes))
|
||||
# npc contains names of the coins removed, puzzle_hashes and their spend conditions
|
||||
return bytes(
|
||||
get_name_puzzle_conditions(program, max_cost, cost_per_byte=cost_per_byte, safe_mode=True, rust_checker=True)
|
||||
)
|
||||
return bytes(get_name_puzzle_conditions(program, max_cost, cost_per_byte=cost_per_byte, safe_mode=True))
|
||||
|
||||
|
||||
class MempoolManager:
|
||||
@ -382,8 +378,6 @@ class MempoolManager:
|
||||
pks: List[G1Element] = []
|
||||
msgs: List[bytes32] = []
|
||||
error: Optional[Err] = None
|
||||
coin_announcements_in_spend: Set[bytes32] = coin_announcements_names_for_npc(npc_list)
|
||||
puzzle_announcements_in_spend: Set[bytes32] = puzzle_announcements_names_for_npc(npc_list)
|
||||
for npc in npc_list:
|
||||
coin_record: CoinRecord = removal_record_dict[npc.coin_name]
|
||||
# Check that the revealed removal puzzles actually match the puzzle hash
|
||||
@ -398,8 +392,6 @@ class MempoolManager:
|
||||
assert self.peak.timestamp is not None
|
||||
error = mempool_check_conditions_dict(
|
||||
coin_record,
|
||||
coin_announcements_in_spend,
|
||||
puzzle_announcements_in_spend,
|
||||
npc.condition_dict,
|
||||
uint32(chialisp_height),
|
||||
self.peak.timestamp,
|
||||
|
@ -86,3 +86,12 @@ class ProtocolMessageTypes(Enum):
|
||||
new_signage_point_harvester = 66
|
||||
request_plots = 67
|
||||
respond_plots = 68
|
||||
|
||||
# More wallet protocol
|
||||
coin_state_update = 69
|
||||
register_interest_in_puzzle_hash = 70
|
||||
respond_to_ph_update = 71
|
||||
register_interest_in_coin = 72
|
||||
respond_to_coin_update = 73
|
||||
request_children = 74
|
||||
respond_children = 75
|
||||
|
64
chia/protocols/protocol_state_machine.py
Normal file
@ -0,0 +1,64 @@
|
||||
from chia.protocols.protocol_message_types import ProtocolMessageTypes as pmt, ProtocolMessageTypes
|
||||
|
||||
NO_REPLY_EXPECTED = [
|
||||
# full_node -> full_node messages
|
||||
pmt.new_peak,
|
||||
pmt.new_transaction,
|
||||
pmt.new_unfinished_block,
|
||||
pmt.new_signage_point_or_end_of_sub_slot,
|
||||
pmt.request_mempool_transactions,
|
||||
pmt.new_compact_vdf,
|
||||
pmt.request_mempool_transactions,
|
||||
]
|
||||
|
||||
"""
|
||||
VALID_REPLY_MESSAGE_MAP:
|
||||
key: sent message type.
|
||||
value: valid reply message types, from the view of the requester.
|
||||
A state machine can be built from this message map.
|
||||
"""
|
||||
|
||||
VALID_REPLY_MESSAGE_MAP = {
|
||||
# messages for all services
|
||||
# pmt.handshake is handled in WSChiaConnection.perform_handshake
|
||||
# full_node -> full_node protocol messages
|
||||
pmt.request_transaction: [pmt.respond_transaction],
|
||||
pmt.request_proof_of_weight: [pmt.respond_proof_of_weight],
|
||||
pmt.request_block: [pmt.respond_block, pmt.reject_block],
|
||||
pmt.request_blocks: [pmt.respond_blocks, pmt.reject_blocks],
|
||||
pmt.request_unfinished_block: [pmt.respond_unfinished_block],
|
||||
pmt.request_signage_point_or_end_of_sub_slot: [pmt.respond_signage_point, pmt.respond_end_of_sub_slot],
|
||||
pmt.request_compact_vdf: [pmt.respond_compact_vdf],
|
||||
pmt.request_peers: [pmt.respond_peers],
|
||||
}
|
||||
|
||||
|
||||
def static_check_sent_message_response() -> None:
|
||||
"""Check that allowed message data structures VALID_REPLY_MESSAGE_MAP and NO_REPLY_EXPECTED are consistent."""
|
||||
# Reply and non-reply sets should not overlap; this is checked once, statically, at import time
|
||||
overlap = set(NO_REPLY_EXPECTED).intersection(set(VALID_REPLY_MESSAGE_MAP.keys()))
|
||||
if len(overlap) != 0:
|
||||
raise AssertionError(f"Overlapping NO_REPLY_EXPECTED and VALID_REPLY_MESSAGE_MAP values: {overlap}")
|
||||
|
||||
|
||||
def message_requires_reply(sent: ProtocolMessageTypes) -> bool:
|
||||
"""Return True if message has an entry in the full node P2P message map"""
|
||||
# If we knew the peer NodeType is FULL_NODE, we could also check `sent not in NO_REPLY_EXPECTED`
|
||||
return sent in VALID_REPLY_MESSAGE_MAP
|
||||
|
||||
|
||||
def message_response_ok(sent: ProtocolMessageTypes, received: ProtocolMessageTypes) -> bool:
|
||||
"""
|
||||
Check to see that peers respect protocol message types in reply.
|
||||
Call with received == None to indicate that we do not expect a specific reply message type.
|
||||
"""
|
||||
# Errors below are runtime protocol message mismatches from peers
|
||||
if sent in VALID_REPLY_MESSAGE_MAP:
|
||||
if received not in VALID_REPLY_MESSAGE_MAP[sent]:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
# Run `static_check_sent_message_response` to check this static invariant at import time
|
||||
static_check_sent_message_response()
|
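A brief usage sketch of the helpers defined in this new module; the particular message types are chosen only for illustration.

from chia.protocols.protocol_message_types import ProtocolMessageTypes as pmt
from chia.protocols.protocol_state_machine import message_requires_reply, message_response_ok

# request_block expects respond_block or reject_block, so it requires a reply...
assert message_requires_reply(pmt.request_block)
assert message_response_ok(pmt.request_block, pmt.respond_block)
# ...and answering it with respond_transaction would be a protocol violation.
assert not message_response_ok(pmt.request_block, pmt.respond_transaction)
# new_peak is fire-and-forget: no reply is expected, so any response type is tolerated.
assert not message_requires_reply(pmt.new_peak)
assert message_response_ok(pmt.new_peak, pmt.respond_transaction)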
4
chia/protocols/protocol_timing.py
Normal file
@ -0,0 +1,4 @@
|
||||
# These settings should not be end-user configurable
|
||||
INVALID_PROTOCOL_BAN_SECONDS = 10
|
||||
API_EXCEPTION_BAN_SECONDS = 10
|
||||
INTERNAL_PROTOCOL_ERROR_BAN_SECONDS = 10 # Don't flap if our client is at fault
|
@ -113,7 +113,7 @@ class RejectRemovalsRequest(Streamable):
|
||||
@streamable
|
||||
class RequestAdditions(Streamable):
|
||||
height: uint32
|
||||
header_hash: bytes32
|
||||
header_hash: Optional[bytes32]
|
||||
puzzle_hashes: Optional[List[bytes32]]
|
||||
|
||||
|
||||
@ -153,3 +153,62 @@ class RespondHeaderBlocks(Streamable):
|
||||
start_height: uint32
|
||||
end_height: uint32
|
||||
header_blocks: List[HeaderBlock]
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
@streamable
|
||||
class CoinState(Streamable):
|
||||
coin: Coin
|
||||
spent_height: Optional[uint32]
|
||||
created_height: Optional[uint32]
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
@streamable
|
||||
class RegisterForPhUpdates(Streamable):
|
||||
puzzle_hashes: List[bytes32]
|
||||
min_height: uint32
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
@streamable
|
||||
class RespondToPhUpdates(Streamable):
|
||||
puzzle_hashes: List[bytes32]
|
||||
min_height: uint32
|
||||
coin_states: List[CoinState]
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
@streamable
|
||||
class RegisterForCoinUpdates(Streamable):
|
||||
coin_ids: List[bytes32]
|
||||
min_height: uint32
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
@streamable
|
||||
class RespondToCoinUpdates(Streamable):
|
||||
coin_ids: List[bytes32]
|
||||
min_height: uint32
|
||||
coin_states: List[CoinState]
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
@streamable
|
||||
class CoinStateUpdate(Streamable):
|
||||
height: uint32
|
||||
fork_height: uint32
|
||||
peak_hash: bytes32
|
||||
items: List[CoinState]
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
@streamable
|
||||
class RequestChildren(Streamable):
|
||||
coin_name: bytes32
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
@streamable
|
||||
class RespondChildren(Streamable):
|
||||
coin_states: List[CoinState]
|
||||
|
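A hedged sketch of how a wallet peer might build one of the new subscription messages defined above; the zeroed puzzle hash is a placeholder, and the import paths assume the modules introduced or touched in this commit.

from chia.protocols.protocol_message_types import ProtocolMessageTypes
from chia.protocols.wallet_protocol import RegisterForPhUpdates
from chia.server.outbound_message import make_msg
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.ints import uint32

# Placeholder puzzle hash; a real wallet would use its own derived puzzle hashes.
ph = bytes32(b"\x00" * 32)

# Ask the full node for all coin states at this puzzle hash created at or after height 0,
# and to push coin_state_update messages for it from now on.
request = RegisterForPhUpdates(puzzle_hashes=[ph], min_height=uint32(0))
msg = make_msg(ProtocolMessageTypes.register_interest_in_puzzle_hash, request)
# The node answers with respond_to_ph_update carrying a RespondToPhUpdates payload.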
@ -38,8 +38,6 @@ class AddressManagerStore:
|
||||
self = cls()
|
||||
self.db = connection
|
||||
await self.db.commit()
|
||||
await self.db.execute("pragma journal_mode=wal")
|
||||
await self.db.execute("pragma synchronous=2")
|
||||
await self.db.execute("CREATE TABLE IF NOT EXISTS peer_metadata(key text,value text)")
|
||||
await self.db.commit()
|
||||
|
||||
|
@ -92,6 +92,8 @@ class FullNodeDiscovery:
|
||||
async def initialize_address_manager(self) -> None:
|
||||
mkdir(self.peer_db_path.parent)
|
||||
self.connection = await aiosqlite.connect(self.peer_db_path)
|
||||
await self.connection.execute("pragma journal_mode=wal")
|
||||
await self.connection.execute("pragma synchronous=NORMAL")
|
||||
self.address_manager_store = await AddressManagerStore.create(self.connection)
|
||||
if not await self.address_manager_store.is_empty():
|
||||
self.address_manager = await self.address_manager_store.deserialize()
|
||||
|
@ -76,9 +76,9 @@ rate_limits_other = {
|
||||
ProtocolMessageTypes.new_compact_vdf: RLSettings(100, 1024),
|
||||
ProtocolMessageTypes.request_peers: RLSettings(10, 100),
|
||||
ProtocolMessageTypes.respond_peers: RLSettings(10, 1 * 1024 * 1024),
|
||||
ProtocolMessageTypes.request_puzzle_solution: RLSettings(100, 100),
|
||||
ProtocolMessageTypes.respond_puzzle_solution: RLSettings(100, 1024 * 1024),
|
||||
ProtocolMessageTypes.reject_puzzle_solution: RLSettings(100, 100),
|
||||
ProtocolMessageTypes.request_puzzle_solution: RLSettings(1000, 100),
|
||||
ProtocolMessageTypes.respond_puzzle_solution: RLSettings(1000, 1024 * 1024),
|
||||
ProtocolMessageTypes.reject_puzzle_solution: RLSettings(1000, 100),
|
||||
ProtocolMessageTypes.new_peak_wallet: RLSettings(200, 300),
|
||||
ProtocolMessageTypes.request_block_header: RLSettings(500, 100),
|
||||
ProtocolMessageTypes.respond_block_header: RLSettings(500, 500 * 1024),
|
||||
@ -97,6 +97,11 @@ rate_limits_other = {
|
||||
ProtocolMessageTypes.farm_new_block: RLSettings(200, 200),
|
||||
ProtocolMessageTypes.request_plots: RLSettings(10, 10 * 1024 * 1024),
|
||||
ProtocolMessageTypes.respond_plots: RLSettings(10, 100 * 1024 * 1024),
|
||||
ProtocolMessageTypes.coin_state_update: RLSettings(1000, 100 * 1024 * 1024),
|
||||
ProtocolMessageTypes.register_interest_in_puzzle_hash: RLSettings(1000, 100 * 1024 * 1024),
|
||||
ProtocolMessageTypes.respond_to_ph_update: RLSettings(1000, 100 * 1024 * 1024),
|
||||
ProtocolMessageTypes.register_interest_in_coin: RLSettings(1000, 100 * 1024 * 1024),
|
||||
ProtocolMessageTypes.respond_to_coin_update: RLSettings(1000, 100 * 1024 * 1024),
|
||||
}
|
||||
|
||||
|
||||
|
@ -16,6 +16,8 @@ from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import hashes, serialization
|
||||
|
||||
from chia.protocols.protocol_message_types import ProtocolMessageTypes
|
||||
from chia.protocols.protocol_state_machine import message_requires_reply
|
||||
from chia.protocols.protocol_timing import INVALID_PROTOCOL_BAN_SECONDS, API_EXCEPTION_BAN_SECONDS
|
||||
from chia.protocols.shared_protocol import protocol_version
|
||||
from chia.server.introducer_peers import IntroducerPeers
|
||||
from chia.server.outbound_message import Message, NodeType
|
||||
@ -160,8 +162,8 @@ class ChiaServer:
|
||||
|
||||
self.tasks_from_peer: Dict[bytes32, Set[bytes32]] = {}
|
||||
self.banned_peers: Dict[str, float] = {}
|
||||
self.invalid_protocol_ban_seconds = 10
|
||||
self.api_exception_ban_seconds = 10
|
||||
self.invalid_protocol_ban_seconds = INVALID_PROTOCOL_BAN_SECONDS
|
||||
self.api_exception_ban_seconds = API_EXCEPTION_BAN_SECONDS
|
||||
self.exempt_peer_networks: List[Union[IPv4Network, IPv6Network]] = [
|
||||
ip_network(net, strict=False) for net in config.get("exempt_peer_networks", [])
|
||||
]
|
||||
@ -373,7 +375,11 @@ class ChiaServer:
|
||||
session = None
|
||||
connection: Optional[WSChiaConnection] = None
|
||||
try:
|
||||
timeout = ClientTimeout(total=30)
|
||||
# Crawler/DNS introducer usually uses a lower timeout than the default
|
||||
timeout_value = (
|
||||
30 if "peer_connect_timeout" not in self.config else float(self.config["peer_connect_timeout"])
|
||||
)
|
||||
timeout = ClientTimeout(total=timeout_value)
|
||||
session = ClientSession(timeout=timeout)
|
||||
|
||||
try:
|
||||
@ -608,13 +614,29 @@ class ChiaServer:
|
||||
for message in messages:
|
||||
await connection.send_message(message)
|
||||
|
||||
async def validate_broadcast_message_type(self, messages: List[Message], node_type: NodeType):
|
||||
for message in messages:
|
||||
if message_requires_reply(ProtocolMessageTypes(message.type)):
|
||||
# Internal protocol logic error - we will raise, blocking messages to all peers
|
||||
self.log.error(f"Attempt to broadcast message requiring protocol response: {message.type}")
|
||||
for _, connection in self.all_connections.items():
|
||||
if connection.connection_type is node_type:
|
||||
await connection.close(
|
||||
self.invalid_protocol_ban_seconds,
|
||||
WSCloseCode.INTERNAL_ERROR,
|
||||
Err.INTERNAL_PROTOCOL_ERROR,
|
||||
)
|
||||
raise ProtocolError(Err.INTERNAL_PROTOCOL_ERROR, [message.type])
|
||||
|
||||
async def send_to_all(self, messages: List[Message], node_type: NodeType):
|
||||
await self.validate_broadcast_message_type(messages, node_type)
|
||||
for _, connection in self.all_connections.items():
|
||||
if connection.connection_type is node_type:
|
||||
for message in messages:
|
||||
await connection.send_message(message)
|
||||
|
||||
async def send_to_all_except(self, messages: List[Message], node_type: NodeType, exclude: bytes32):
|
||||
await self.validate_broadcast_message_type(messages, node_type)
|
||||
for _, connection in self.all_connections.items():
|
||||
if connection.connection_type is node_type and connection.peer_node_id != exclude:
|
||||
for message in messages:
|
||||
|
@ -65,7 +65,7 @@ class UPnP:
|
||||
return
|
||||
self.queue.put(("shutdown",))
|
||||
log.info("UPnP, shutting down thread")
|
||||
self.thread.join()
|
||||
self.thread.join(5)
|
||||
self.thread = None
|
||||
|
||||
# this is here just in case the UPnP object is destroyed non-gracefully,
|
||||
|
@ -8,6 +8,8 @@ from aiohttp import WSCloseCode, WSMessage, WSMsgType
|
||||
|
||||
from chia.cmds.init_funcs import chia_full_version_str
|
||||
from chia.protocols.protocol_message_types import ProtocolMessageTypes
|
||||
from chia.protocols.protocol_state_machine import message_response_ok
|
||||
from chia.protocols.protocol_timing import INTERNAL_PROTOCOL_ERROR_BAN_SECONDS
|
||||
from chia.protocols.shared_protocol import Capability, Handshake
|
||||
from chia.server.outbound_message import Message, NodeType, make_msg
|
||||
from chia.server.rate_limits import RateLimiter
|
||||
@ -103,6 +105,9 @@ class WSChiaConnection:
|
||||
self.outbound_rate_limiter = RateLimiter(incoming=False, percentage_of_limit=outbound_rate_limit_percent)
|
||||
self.inbound_rate_limiter = RateLimiter(incoming=True, percentage_of_limit=inbound_rate_limit_percent)
|
||||
|
||||
# Used by crawler/dns introducer
|
||||
self.version = None
|
||||
|
||||
async def perform_handshake(self, network_id: str, protocol_version: str, server_port: int, local_type: NodeType):
|
||||
if self.is_outbound:
|
||||
outbound_handshake = make_msg(
|
||||
@ -135,6 +140,8 @@ class WSChiaConnection:
|
||||
if inbound_handshake.network_id != network_id:
|
||||
raise ProtocolError(Err.INCOMPATIBLE_NETWORK_ID)
|
||||
|
||||
self.version = inbound_handshake.software_version
|
||||
|
||||
self.peer_server_port = inbound_handshake.server_port
|
||||
self.connection_type = NodeType(inbound_handshake.node_type)
|
||||
|
||||
@ -212,6 +219,12 @@ class WSChiaConnection:
|
||||
raise
|
||||
self.close_callback(self, ban_time)
|
||||
|
||||
async def ban_peer_bad_protocol(self, log_err_msg: str):
|
||||
"""Ban peer for protocol violation"""
|
||||
ban_seconds = INTERNAL_PROTOCOL_ERROR_BAN_SECONDS
|
||||
self.log.error(f"Banning peer for {ban_seconds} seconds: {self.peer_host} {log_err_msg}")
|
||||
await self.close(ban_seconds, WSCloseCode.PROTOCOL_ERROR, Err.INVALID_PROTOCOL_MESSAGE)
|
||||
|
||||
def cancel_pending_timeouts(self):
|
||||
for _, task in self.pending_timeouts.items():
|
||||
task.cancel()
|
||||
@ -269,14 +282,22 @@ class WSChiaConnection:
|
||||
if attribute is None:
|
||||
raise AttributeError(f"Node type {self.connection_type} does not have method {attr_name}")
|
||||
|
||||
msg = Message(uint8(getattr(ProtocolMessageTypes, attr_name).value), None, args[0])
|
||||
msg: Message = Message(uint8(getattr(ProtocolMessageTypes, attr_name).value), None, args[0])
|
||||
request_start_t = time.time()
|
||||
result = await self.create_request(msg, timeout)
|
||||
result = await self.send_request(msg, timeout)
|
||||
self.log.debug(
|
||||
f"Time for request {attr_name}: {self.get_peer_logging()} = {time.time() - request_start_t}, "
|
||||
f"None? {result is None}"
|
||||
)
|
||||
if result is not None:
|
||||
sent_message_type = ProtocolMessageTypes(msg.type)
|
||||
recv_message_type = ProtocolMessageTypes(result.type)
|
||||
if not message_response_ok(sent_message_type, recv_message_type):
|
||||
# peer protocol violation
|
||||
error_message = (
f"WSConnection.invoke sent message {sent_message_type.name} "
f"but received {recv_message_type.name}"
)
await self.ban_peer_bad_protocol(error_message)
|
||||
raise ProtocolError(Err.INVALID_PROTOCOL_MESSAGE, [error_message])
|
||||
ret_attr = getattr(class_for_type(self.local_type), ProtocolMessageTypes(result.type).name, None)
|
||||
|
||||
req_annotations = ret_attr.__annotations__
|
||||
@ -292,7 +313,7 @@ class WSChiaConnection:
|
||||
|
||||
return invoke
|
||||
|
||||
async def create_request(self, message_no_id: Message, timeout: int) -> Optional[Message]:
|
||||
async def send_request(self, message_no_id: Message, timeout: int) -> Optional[Message]:
|
||||
"""Sends a message and waits for a response."""
|
||||
if self.closed:
|
||||
return None
|
||||
@ -461,6 +482,10 @@ class WSChiaConnection:
|
||||
await asyncio.sleep(3)
|
||||
return None
|
||||
|
||||
# Used by crawler/dns introducer
|
||||
def get_version(self):
|
||||
return self.version
|
||||
|
||||
def get_peer_info(self) -> Optional[PeerInfo]:
|
||||
result = self.ws._writer.transport.get_extra_info("peername")
|
||||
if result is None:
|
||||
|
@ -1,5 +1,7 @@
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
|
||||
from chia.protocols.wallet_protocol import CoinState
|
||||
from chia.types.blockchain_format.coin import Coin
|
||||
from chia.types.blockchain_format.sized_bytes import bytes32
|
||||
from chia.util.ints import uint32, uint64
|
||||
@ -24,3 +26,13 @@ class CoinRecord(Streamable):
|
||||
@property
|
||||
def name(self) -> bytes32:
|
||||
return self.coin.name()
|
||||
|
||||
@property
|
||||
def coin_state(self) -> CoinState:
|
||||
spent_h = None
|
||||
if self.spent:
|
||||
spent_h = self.spent_block_index
|
||||
confirmed_height: Optional[uint32] = self.confirmed_block_index
|
||||
if self.confirmed_block_index == 0 and self.timestamp == 0:
|
||||
confirmed_height = None
|
||||
return CoinState(self.coin, spent_h, confirmed_height)
|
||||
|
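A small stand-alone illustration of the spent/created height mapping performed by the coin_state property above; coin_state_heights is a hypothetical helper written only for this sketch, and the numbers are made up.

from typing import Optional, Tuple

def coin_state_heights(
    spent: bool, spent_block_index: int, confirmed_block_index: int, timestamp: int
) -> Tuple[Optional[int], Optional[int]]:
    # spent_height is reported only for coins actually marked spent
    spent_h = spent_block_index if spent else None
    # a (0, 0) confirmed-index/timestamp pair is the "not yet created on chain" sentinel
    created_h: Optional[int] = confirmed_block_index
    if confirmed_block_index == 0 and timestamp == 0:
        created_h = None
    return spent_h, created_h

assert coin_state_heights(True, 120, 100, 1625000000) == (120, 100)
assert coin_state_heights(False, 0, 0, 0) == (None, None)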
@ -3,7 +3,7 @@ from typing import List
|
||||
|
||||
from chia.types.blockchain_format.coin import Coin
|
||||
from chia.types.blockchain_format.program import SerializedProgram, INFINITE_COST
|
||||
from chia.util.chain_utils import additions_for_solution
|
||||
from chia.util.chain_utils import additions_for_solution, fee_for_solution
|
||||
from chia.util.streamable import Streamable, streamable
|
||||
|
||||
|
||||
@ -22,3 +22,6 @@ class CoinSpend(Streamable):
|
||||
|
||||
def additions(self) -> List[Coin]:
|
||||
return additions_for_solution(self.coin.name(), self.puzzle_reveal, self.solution, INFINITE_COST)
|
||||
|
||||
def reserved_fee(self) -> int:
|
||||
return fee_for_solution(self.puzzle_reveal, self.solution, INFINITE_COST)
|
||||
|
@ -6,6 +6,7 @@ from typing import List
|
||||
|
||||
from blspy import AugSchemeMPL, G2Element
|
||||
|
||||
from chia.consensus.default_constants import DEFAULT_CONSTANTS
|
||||
from chia.types.blockchain_format.coin import Coin
|
||||
from chia.types.blockchain_format.sized_bytes import bytes32
|
||||
from chia.util.streamable import Streamable, dataclass_from_dict, recurse_jsonify, streamable
|
||||
@ -61,7 +62,7 @@ class SpendBundle(Streamable):
|
||||
def name(self) -> bytes32:
|
||||
return self.get_hash()
|
||||
|
||||
def debug(self, agg_sig_additional_data=bytes([3] * 32)):
|
||||
def debug(self, agg_sig_additional_data=DEFAULT_CONSTANTS.AGG_SIG_ME_ADDITIONAL_DATA):
|
||||
debug_spend_bundle(self, agg_sig_additional_data)
|
||||
|
||||
def not_ephemeral_additions(self):
|
||||
|
@ -59,3 +59,14 @@ def execute_task(func):
|
||||
return func
|
||||
|
||||
return inner()
|
||||
|
||||
|
||||
def reply_type(type):
|
||||
def wrap(func):
|
||||
def inner():
|
||||
setattr(func, "reply_type", type)
|
||||
return func
|
||||
|
||||
return inner()
|
||||
|
||||
return wrap
|
||||
|
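A short illustration of how the reply_type decorator above attaches metadata to an API handler. The handler name is hypothetical, and the example assumes these decorators live in chia.util.api_decorators as in this diff.

from chia.protocols.protocol_message_types import ProtocolMessageTypes
from chia.util.api_decorators import api_request, reply_type

@api_request
@reply_type([ProtocolMessageTypes.respond_compact_vdf])
async def request_compact_vdf_example(request):  # hypothetical handler
    ...

# Both decorators return the original function object, so the attribute set by
# reply_type survives and can be inspected by the server machinery:
assert request_compact_vdf_example.reply_type == [ProtocolMessageTypes.respond_compact_vdf]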
@ -1,8 +1,11 @@
|
||||
from typing import List
|
||||
|
||||
from clvm.casts import int_from_bytes
|
||||
|
||||
from chia.types.blockchain_format.coin import Coin
|
||||
from chia.types.blockchain_format.program import SerializedProgram
|
||||
from chia.types.blockchain_format.sized_bytes import bytes32
|
||||
from chia.types.condition_opcodes import ConditionOpcode
|
||||
from chia.util.condition_tools import (
|
||||
conditions_dict_for_solution,
|
||||
created_outputs_for_conditions_dict,
|
||||
@ -19,3 +22,16 @@ def additions_for_solution(
|
||||
if err or dic is None:
|
||||
return []
|
||||
return created_outputs_for_conditions_dict(dic, coin_name)
|
||||
|
||||
|
||||
def fee_for_solution(puzzle_reveal: SerializedProgram, solution: SerializedProgram, max_cost: int) -> int:
|
||||
err, dic, cost = conditions_dict_for_solution(puzzle_reveal, solution, max_cost)
|
||||
if err or dic is None:
|
||||
return 0
|
||||
|
||||
total = 0
|
||||
for cvp in dic.get(ConditionOpcode.RESERVE_FEE, []):
|
||||
amount_bin = cvp.vars[0]
|
||||
amount = int_from_bytes(amount_bin)
|
||||
total += amount
|
||||
return total
|
||||
|
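A tiny worked example of the RESERVE_FEE summation that fee_for_solution above (and the new CoinSpend.reserved_fee) performs over a parsed conditions dictionary; the fee values are made up.

from clvm.casts import int_from_bytes, int_to_bytes
from chia.types.condition_opcodes import ConditionOpcode
from chia.types.condition_with_args import ConditionWithArgs

conditions = {
    ConditionOpcode.RESERVE_FEE: [
        ConditionWithArgs(ConditionOpcode.RESERVE_FEE, [int_to_bytes(10)]),
        ConditionWithArgs(ConditionOpcode.RESERVE_FEE, [int_to_bytes(5)]),
    ]
}
# Sum every RESERVE_FEE argument, exactly as fee_for_solution does after parsing.
total = sum(int_from_bytes(cvp.vars[0]) for cvp in conditions.get(ConditionOpcode.RESERVE_FEE, []))
assert total == 15  # the spend reserves 15 mojos as fee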
@ -124,32 +124,6 @@ def puzzle_announcements_for_conditions_dict(
|
||||
return output_announcements
|
||||
|
||||
|
||||
def coin_announcements_names_for_npc(npc_list) -> Set[bytes32]:
|
||||
output_announcements: Set[bytes32] = set()
|
||||
for npc in npc_list:
|
||||
for condition, cvp_list in npc.conditions:
|
||||
if condition == ConditionOpcode.CREATE_COIN_ANNOUNCEMENT:
|
||||
for cvp in cvp_list:
|
||||
message = cvp.vars[0]
|
||||
assert len(message) <= 1024
|
||||
announcement = Announcement(npc.coin_name, message)
|
||||
output_announcements.add(announcement.name())
|
||||
return output_announcements
|
||||
|
||||
|
||||
def puzzle_announcements_names_for_npc(npc_list) -> Set[bytes32]:
|
||||
output_announcements: Set[bytes32] = set()
|
||||
for npc in npc_list:
|
||||
for condition, cvp_list in npc.conditions:
|
||||
if condition == ConditionOpcode.CREATE_PUZZLE_ANNOUNCEMENT:
|
||||
for cvp in cvp_list:
|
||||
message = cvp.vars[0]
|
||||
assert len(message) <= 1024
|
||||
announcement = Announcement(npc.puzzle_hash, message)
|
||||
output_announcements.add(announcement.name())
|
||||
return output_announcements
|
||||
|
||||
|
||||
def coin_announcement_names_for_conditions_dict(
|
||||
conditions_dict: Dict[ConditionOpcode, List[ConditionWithArgs]],
|
||||
input_coin: Coin,
|
||||
|
@ -7,7 +7,7 @@ class Err(Enum):
|
||||
DOES_NOT_EXTEND = -1
|
||||
BAD_HEADER_SIGNATURE = -2
|
||||
MISSING_FROM_STORAGE = -3
|
||||
INVALID_PROTOCOL_MESSAGE = -4
|
||||
INVALID_PROTOCOL_MESSAGE = -4 # We WILL ban for a protocol violation.
|
||||
SELF_CONNECTION = -5
|
||||
INVALID_HANDSHAKE = -6
|
||||
INVALID_ACK = -7
|
||||
@ -129,8 +129,8 @@ class Err(Enum):
|
||||
INVALID_PREFARM = 104
|
||||
ASSERT_SECONDS_RELATIVE_FAILED = 105
|
||||
BAD_COINBASE_SIGNATURE = 106
|
||||
# removed
|
||||
# INITIAL_TRANSACTION_FREEZE = 107
|
||||
|
||||
# INITIAL_TRANSACTION_FREEZE = 107 # removed
|
||||
NO_TRANSACTIONS_WHILE_SYNCING = 108
|
||||
ALREADY_INCLUDING_TRANSACTION = 109
|
||||
INCOMPATIBLE_NETWORK_ID = 110
|
||||
@ -151,6 +151,7 @@ class Err(Enum):
|
||||
|
||||
INVALID_FEE_TOO_CLOSE_TO_ZERO = 123
|
||||
COIN_AMOUNT_NEGATIVE = 124
|
||||
INTERNAL_PROTOCOL_ERROR = 125
|
||||
|
||||
|
||||
class ValidationError(Exception):
|
||||
|
@ -4,6 +4,7 @@ min_mainnet_k_size: 32
|
||||
ping_interval: 120
|
||||
self_hostname: &self_hostname "localhost"
|
||||
daemon_port: 55400
|
||||
daemon_max_message_size: 50000000 # maximum size of RPC message in bytes
|
||||
inbound_rate_limit_percent: 100
|
||||
outbound_rate_limit_percent: 30
|
||||
|
||||
@ -65,7 +66,6 @@ network_overrides: &network_overrides
|
||||
MEMPOOL_BLOCK_BUFFER: 10
|
||||
EPOCH_BLOCKS: 768
|
||||
DIFFICULTY_STARTING: 30
|
||||
RUST_CONDITION_CHECKER: 0
|
||||
config:
|
||||
mainnet:
|
||||
address_prefix: "xch"
|
||||
@ -276,6 +276,8 @@ full_node:
|
||||
|
||||
# How often to initiate outbound connections to other full nodes.
|
||||
peer_connect_interval: 30
|
||||
# How long to wait for a peer connection
|
||||
peer_connect_timeout: 30
|
||||
# Accept peers until this number of connections
|
||||
target_peer_count: 80
|
||||
# Initiate outbound connections until this number is hit.
|
||||
|
@ -32,7 +32,7 @@ class KeyringRequiresMigration(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class KeyringCurrentPassphaseIsInvalid(Exception):
|
||||
class KeyringCurrentPassphraseIsInvalid(Exception):
|
||||
pass
|
||||
|
||||
|
||||
|
@ -33,8 +33,8 @@ class KeyringWrapper:
|
||||
# Instance members
|
||||
keys_root_path: Path
|
||||
keyring: Union[Any, FileKeyring] = None
|
||||
cached_passphase: Optional[str] = DEFAULT_PASSPHRASE_IF_NO_MASTER_PASSPHRASE
|
||||
cached_passphase_is_validated: bool = False
|
||||
cached_passphrase: Optional[str] = DEFAULT_PASSPHRASE_IF_NO_MASTER_PASSPHRASE
|
||||
cached_passphrase_is_validated: bool = False
|
||||
legacy_keyring = None
|
||||
|
||||
def __init__(self, keys_root_path: Path = DEFAULT_KEYS_ROOT_PATH):
|
||||
@ -135,15 +135,15 @@ class KeyringWrapper:
|
||||
Returns a tuple including the currently cached passphrase and a bool
|
||||
indicating whether the passphrase has been previously validated.
|
||||
"""
|
||||
return self.cached_passphase, self.cached_passphase_is_validated
|
||||
return self.cached_passphrase, self.cached_passphrase_is_validated
|
||||
|
||||
def set_cached_master_passphrase(self, passphrase: Optional[str], validated=False) -> None:
|
||||
"""
|
||||
Cache the provided passphrase and optionally indicate whether the passphrase
|
||||
has been validated.
|
||||
"""
|
||||
self.cached_passphase = passphrase
|
||||
self.cached_passphase_is_validated = validated
|
||||
self.cached_passphrase = passphrase
|
||||
self.cached_passphrase_is_validated = validated
|
||||
|
||||
def has_cached_master_passphrase(self) -> bool:
|
||||
passphrase = self.get_cached_master_passphrase()
|
||||
@ -170,7 +170,7 @@ class KeyringWrapper:
|
||||
Sets a new master passphrase for the keyring
|
||||
"""
|
||||
|
||||
from chia.util.keychain import KeyringCurrentPassphaseIsInvalid, KeyringRequiresMigration
|
||||
from chia.util.keychain import KeyringCurrentPassphraseIsInvalid, KeyringRequiresMigration
|
||||
|
||||
# Require a valid current_passphrase
|
||||
if (
|
||||
@ -178,7 +178,7 @@ class KeyringWrapper:
|
||||
and current_passphrase is not None
|
||||
and not self.master_passphrase_is_valid(current_passphrase)
|
||||
):
|
||||
raise KeyringCurrentPassphaseIsInvalid("invalid current passphrase")
|
||||
raise KeyringCurrentPassphraseIsInvalid("invalid current passphrase")
|
||||
|
||||
self.set_cached_master_passphrase(new_passphrase, validated=True)
|
||||
|
||||
|
@ -256,7 +256,6 @@ class CCWallet:
|
||||
self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM,
|
||||
cost_per_byte=self.wallet_state_manager.constants.COST_PER_BYTE,
|
||||
safe_mode=True,
|
||||
rust_checker=True,
|
||||
)
|
||||
cost_result: uint64 = calculate_cost_of_program(
|
||||
program.program, result, self.wallet_state_manager.constants.COST_PER_BYTE
|
||||
|
@ -20,9 +20,6 @@ class KeyValStore:
|
||||
self = cls()
|
||||
self.db_wrapper = db_wrapper
|
||||
self.db_connection = db_wrapper.db
|
||||
await self.db_connection.execute("pragma journal_mode=wal")
|
||||
await self.db_connection.execute("pragma synchronous=2")
|
||||
|
||||
await self.db_connection.execute(
|
||||
("CREATE TABLE IF NOT EXISTS key_val_store(" " key text PRIMARY KEY," " value text)")
|
||||
)
|
||||
|
@ -27,9 +27,6 @@ class TradeStore:
|
||||
self.cache_size = cache_size
|
||||
self.db_wrapper = db_wrapper
|
||||
self.db_connection = db_wrapper.db
|
||||
|
||||
await self.db_connection.execute("pragma journal_mode=wal")
|
||||
await self.db_connection.execute("pragma synchronous=2")
|
||||
await self.db_connection.execute(
|
||||
(
|
||||
"CREATE TABLE IF NOT EXISTS trade_records("
|
||||
|
@ -7,6 +7,7 @@ from clvm_tools.binutils import disassemble as bu_disassemble
|
||||
from chia.types.blockchain_format.coin import Coin
|
||||
from chia.types.blockchain_format.program import Program, INFINITE_COST
|
||||
from chia.types.blockchain_format.sized_bytes import bytes32
|
||||
from chia.consensus.default_constants import DEFAULT_CONSTANTS
|
||||
from chia.types.condition_opcodes import ConditionOpcode
|
||||
from chia.util.condition_tools import conditions_dict_for_solution, pkm_pairs_for_conditions_dict
|
||||
from chia.util.hash import std_hash
|
||||
@ -40,7 +41,7 @@ def dump_coin(coin: Coin) -> str:
|
||||
return disassemble(coin_as_program(coin))
|
||||
|
||||
|
||||
def debug_spend_bundle(spend_bundle, agg_sig_additional_data=bytes([3] * 32)) -> None:
|
||||
def debug_spend_bundle(spend_bundle, agg_sig_additional_data=DEFAULT_CONSTANTS.AGG_SIG_ME_ADDITIONAL_DATA) -> None:
|
||||
"""
|
||||
Print a lot of useful information about a `SpendBundle` that might help with debugging
|
||||
its clvm.
|
||||
|
@ -82,7 +82,6 @@ class Wallet:
|
||||
self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM,
|
||||
cost_per_byte=self.wallet_state_manager.constants.COST_PER_BYTE,
|
||||
safe_mode=True,
|
||||
rust_checker=True,
|
||||
)
|
||||
cost_result: uint64 = calculate_cost_of_program(
|
||||
program.program, result, self.wallet_state_manager.constants.COST_PER_BYTE
|
||||
|
@ -36,9 +36,6 @@ class WalletBlockStore:
|
||||
|
||||
self.db_wrapper = db_wrapper
|
||||
self.db = db_wrapper.db
|
||||
await self.db.execute("pragma journal_mode=wal")
|
||||
await self.db.execute("pragma synchronous=2")
|
||||
|
||||
await self.db.execute(
|
||||
"CREATE TABLE IF NOT EXISTS header_blocks(header_hash text PRIMARY KEY, height int,"
|
||||
" timestamp int, block blob)"
|
||||
|
@ -29,9 +29,6 @@ class WalletCoinStore:
|
||||
|
||||
self.db_connection = wrapper.db
|
||||
self.db_wrapper = wrapper
|
||||
await self.db_connection.execute("pragma journal_mode=wal")
|
||||
await self.db_connection.execute("pragma synchronous=2")
|
||||
|
||||
await self.db_connection.execute(
|
||||
(
|
||||
"CREATE TABLE IF NOT EXISTS coin_record("
|
||||
|
@ -20,8 +20,6 @@ class WalletInterestedStore:
|
||||
|
||||
self.db_connection = wrapper.db
|
||||
self.db_wrapper = wrapper
|
||||
await self.db_connection.execute("pragma journal_mode=wal")
|
||||
await self.db_connection.execute("pragma synchronous=2")
|
||||
|
||||
await self.db_connection.execute("CREATE TABLE IF NOT EXISTS interested_coins(coin_name text PRIMARY KEY)")
|
||||
|
||||
|
@ -21,8 +21,6 @@ class WalletPoolStore:
|
||||
|
||||
self.db_connection = wrapper.db
|
||||
self.db_wrapper = wrapper
|
||||
await self.db_connection.execute("pragma journal_mode=wal")
|
||||
await self.db_connection.execute("pragma synchronous=2")
|
||||
|
||||
await self.db_connection.execute(
|
||||
"CREATE TABLE IF NOT EXISTS pool_state_transitions(transition_index integer, wallet_id integer, "
|
||||
|
@ -35,8 +35,6 @@ class WalletPuzzleStore:
|
||||
|
||||
self.db_wrapper = db_wrapper
|
||||
self.db_connection = self.db_wrapper.db
|
||||
await self.db_connection.execute("pragma journal_mode=wal")
|
||||
await self.db_connection.execute("pragma synchronous=2")
|
||||
await self.db_connection.execute(
|
||||
(
|
||||
"CREATE TABLE IF NOT EXISTS derivation_paths("
|
||||
|
@ -137,6 +137,9 @@ class WalletStateManager:
|
||||
self.lock = asyncio.Lock()
|
||||
self.log.debug(f"Starting in db path: {db_path}")
|
||||
self.db_connection = await aiosqlite.connect(db_path)
|
||||
await self.db_connection.execute("pragma journal_mode=wal")
|
||||
await self.db_connection.execute("pragma synchronous=NORMAL")
|
||||
|
||||
self.db_wrapper = DBWrapper(self.db_connection)
|
||||
self.coin_store = await WalletCoinStore.create(self.db_wrapper)
|
||||
self.tx_store = await WalletTransactionStore.create(self.db_wrapper)
|
||||
|
@ -29,9 +29,6 @@ class WalletTransactionStore:
|
||||
|
||||
self.db_wrapper = db_wrapper
|
||||
self.db_connection = self.db_wrapper.db
|
||||
|
||||
await self.db_connection.execute("pragma journal_mode=wal")
|
||||
await self.db_connection.execute("pragma synchronous=2")
|
||||
await self.db_connection.execute(
|
||||
(
|
||||
"CREATE TABLE IF NOT EXISTS transaction_record("
|
||||
|
@ -23,8 +23,6 @@ class WalletUserStore:
|
||||
|
||||
self.db_wrapper = db_wrapper
|
||||
self.db_connection = db_wrapper.db
|
||||
await self.db_connection.execute("pragma journal_mode=wal")
|
||||
await self.db_connection.execute("pragma synchronous=2")
|
||||
await self.db_connection.execute(
|
||||
(
|
||||
"CREATE TABLE IF NOT EXISTS users_wallets("
|
||||
|
@ -121,7 +121,7 @@ python -m pip install -e . --extra-index-url https://pypi.chia.net/simple/
|
||||
|
||||
echo ""
|
||||
echo "Chia blockchain install.sh complete."
|
||||
echo "For assistance join us on Keybase in the #testnet chat channel:"
|
||||
echo "For assistance join us on Keybase in the #support chat channel:"
|
||||
echo "https://keybase.io/team/chia_network.public"
|
||||
echo ""
|
||||
echo "Try the Quick Start Guide to running chia-blockchain:"
|
||||
|
@ -92,7 +92,6 @@ from chia.wallet.derive_keys import (
|
||||
|
||||
test_constants = DEFAULT_CONSTANTS.replace(
|
||||
**{
|
||||
"RUST_CONDITION_CHECKER": 0,
|
||||
"MIN_PLOT_SIZE": 18,
|
||||
"MIN_BLOCKS_PER_CHALLENGE_BLOCK": 12,
|
||||
"DIFFICULTY_STARTING": 2 ** 12,
|
||||
|
@ -88,7 +88,7 @@ class TestGenesisBlock:
|
||||
async def test_non_overflow_genesis(self, empty_blockchain):
|
||||
assert empty_blockchain.get_peak() is None
|
||||
genesis = bt.get_consecutive_blocks(1, force_overflow=False)[0]
|
||||
result, err, _ = await empty_blockchain.receive_block(genesis)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(genesis)
|
||||
assert err is None
|
||||
assert result == ReceiveBlockResult.NEW_PEAK
|
||||
assert empty_blockchain.get_peak().height == 0
|
||||
@ -96,21 +96,21 @@ class TestGenesisBlock:
|
||||
@pytest.mark.asyncio
|
||||
async def test_overflow_genesis(self, empty_blockchain):
|
||||
genesis = bt.get_consecutive_blocks(1, force_overflow=True)[0]
|
||||
result, err, _ = await empty_blockchain.receive_block(genesis)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(genesis)
|
||||
assert err is None
|
||||
assert result == ReceiveBlockResult.NEW_PEAK
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_genesis_empty_slots(self, empty_blockchain):
|
||||
genesis = bt.get_consecutive_blocks(1, force_overflow=False, skip_slots=30)[0]
|
||||
result, err, _ = await empty_blockchain.receive_block(genesis)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(genesis)
|
||||
assert err is None
|
||||
assert result == ReceiveBlockResult.NEW_PEAK
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_overflow_genesis_empty_slots(self, empty_blockchain):
|
||||
genesis = bt.get_consecutive_blocks(1, force_overflow=True, skip_slots=3)[0]
|
||||
result, err, _ = await empty_blockchain.receive_block(genesis)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(genesis)
|
||||
assert err is None
|
||||
assert result == ReceiveBlockResult.NEW_PEAK
|
||||
|
||||
@ -119,7 +119,7 @@ class TestGenesisBlock:
|
||||
genesis = bt.get_consecutive_blocks(1, force_overflow=False)[0]
|
||||
bad_prev = bytes([1] * 32)
|
||||
genesis = recursive_replace(genesis, "foliage.prev_block_hash", bad_prev)
|
||||
result, err, _ = await empty_blockchain.receive_block(genesis)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(genesis)
|
||||
assert err == Err.INVALID_PREV_BLOCK_HASH
|
||||
|
||||
|
||||
@ -141,7 +141,7 @@ class TestBlockHeaderValidation:
|
||||
block_bad = recursive_replace(
|
||||
block, "finished_sub_slots", [new_finished_ss] + block.finished_sub_slots[1:]
|
||||
)
|
||||
result, err, _ = await empty_blockchain.receive_block(block_bad)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(block_bad)
|
||||
assert err == Err.INVALID_NEW_SUB_SLOT_ITERS
|
||||
new_finished_ss_2 = recursive_replace(
|
||||
block.finished_sub_slots[0],
|
||||
@ -151,7 +151,7 @@ class TestBlockHeaderValidation:
|
||||
block_bad_2 = recursive_replace(
|
||||
block, "finished_sub_slots", [new_finished_ss_2] + block.finished_sub_slots[1:]
|
||||
)
|
||||
result, err, _ = await empty_blockchain.receive_block(block_bad_2)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(block_bad_2)
|
||||
assert err == Err.INVALID_NEW_DIFFICULTY
|
||||
|
||||
# 3c
|
||||
@ -168,7 +168,7 @@ class TestBlockHeaderValidation:
|
||||
block_bad_3 = recursive_replace(
|
||||
block, "finished_sub_slots", [new_finished_ss_3] + block.finished_sub_slots[1:]
|
||||
)
|
||||
result, err, _ = await empty_blockchain.receive_block(block_bad_3)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(block_bad_3)
|
||||
assert err == Err.INVALID_SUB_EPOCH_SUMMARY
|
||||
|
||||
# 3d
|
||||
@ -185,10 +185,10 @@ class TestBlockHeaderValidation:
|
||||
block_bad_4 = recursive_replace(
|
||||
block, "finished_sub_slots", [new_finished_ss_4] + block.finished_sub_slots[1:]
|
||||
)
|
||||
result, err, _ = await empty_blockchain.receive_block(block_bad_4)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(block_bad_4)
|
||||
assert err == Err.INVALID_SUB_EPOCH_SUMMARY or err == Err.INVALID_NEW_SUB_SLOT_ITERS
|
||||
|
||||
result, err, _ = await empty_blockchain.receive_block(block)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(block)
|
||||
assert err is None
|
||||
assert result == ReceiveBlockResult.NEW_PEAK
|
||||
log.info(
|
||||
@ -202,7 +202,7 @@ class TestBlockHeaderValidation:
|
||||
blockchain = empty_blockchain
|
||||
blocks = bt.get_consecutive_blocks(3)
|
||||
for block in blocks[:-1]:
|
||||
result, err, _ = await blockchain.receive_block(block)
|
||||
result, err, _, _ = await blockchain.receive_block(block)
|
||||
assert result == ReceiveBlockResult.NEW_PEAK
|
||||
block = blocks[-1]
|
||||
unf = UnfinishedBlock(
|
||||
@ -219,7 +219,7 @@ class TestBlockHeaderValidation:
|
||||
validate_res = await blockchain.validate_unfinished_block(unf, False)
|
||||
err = validate_res.error
|
||||
assert err is None
|
||||
result, err, _ = await blockchain.receive_block(block)
|
||||
result, err, _, _ = await blockchain.receive_block(block)
|
||||
blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, force_overflow=True)
|
||||
block = blocks[-1]
|
||||
unf = UnfinishedBlock(
|
||||
@ -240,7 +240,7 @@ class TestBlockHeaderValidation:
|
||||
async def test_empty_genesis(self, empty_blockchain):
|
||||
blockchain = empty_blockchain
|
||||
for block in bt.get_consecutive_blocks(2, skip_slots=3):
|
||||
result, err, _ = await blockchain.receive_block(block)
|
||||
result, err, _, _ = await blockchain.receive_block(block)
|
||||
assert err is None
|
||||
assert result == ReceiveBlockResult.NEW_PEAK
|
||||
|
||||
@ -249,13 +249,13 @@ class TestBlockHeaderValidation:
|
||||
blockchain = empty_blockchain
|
||||
blocks = bt.get_consecutive_blocks(10)
|
||||
for block in blocks:
|
||||
result, err, _ = await blockchain.receive_block(block)
|
||||
result, err, _, _ = await blockchain.receive_block(block)
|
||||
assert err is None
|
||||
assert result == ReceiveBlockResult.NEW_PEAK
|
||||
|
||||
blocks = bt.get_consecutive_blocks(10, skip_slots=2, block_list_input=blocks)
|
||||
for block in blocks[10:]:
|
||||
result, err, _ = await blockchain.receive_block(block)
|
||||
result, err, _, _ = await blockchain.receive_block(block)
|
||||
assert err is None
|
||||
assert blockchain.get_peak().height == 19
|
||||
|
||||
@ -266,7 +266,7 @@ class TestBlockHeaderValidation:
|
||||
blocks = []
|
||||
for i in range(num_blocks):
|
||||
blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, skip_slots=1)
|
||||
result, err, _ = await blockchain.receive_block(blocks[-1])
|
||||
result, err, _, _ = await blockchain.receive_block(blocks[-1])
|
||||
assert result == ReceiveBlockResult.NEW_PEAK
|
||||
assert blockchain.get_peak().height == num_blocks - 1
|
||||
|
||||
@ -280,7 +280,7 @@ class TestBlockHeaderValidation:
|
||||
num_blocks += i
|
||||
blocks = bt.get_consecutive_blocks(i, block_list_input=blocks, skip_slots=1, force_overflow=True)
|
||||
for block in blocks[-i:]:
|
||||
result, err, _ = await blockchain.receive_block(block)
|
||||
result, err, _, _ = await blockchain.receive_block(block)
|
||||
assert result == ReceiveBlockResult.NEW_PEAK
|
||||
assert err is None
|
||||
assert blockchain.get_peak().height == num_blocks - 1
|
||||
@ -329,7 +329,7 @@ class TestBlockHeaderValidation:
|
||||
blocks = []
|
||||
for i in range(num_blocks): # Same thing, but 2 sub-slots per block
|
||||
blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, skip_slots=2)
|
||||
result, err, _ = await blockchain.receive_block(blocks[-1])
|
||||
result, err, _, _ = await blockchain.receive_block(blocks[-1])
|
||||
assert result == ReceiveBlockResult.NEW_PEAK
|
||||
assert blockchain.get_peak().height == num_blocks - 1
|
||||
|
||||
@ -340,7 +340,7 @@ class TestBlockHeaderValidation:
|
||||
blocks = []
|
||||
for i in range(num_blocks): # Same thing, but 5 sub-slots per block
|
||||
blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, skip_slots=5)
|
||||
result, err, _ = await blockchain.receive_block(blocks[-1])
|
||||
result, err, _, _ = await blockchain.receive_block(blocks[-1])
|
||||
assert result == ReceiveBlockResult.NEW_PEAK
|
||||
assert blockchain.get_peak().height == num_blocks - 1
|
||||
|
||||
@ -348,7 +348,7 @@ class TestBlockHeaderValidation:
|
||||
async def test_basic_chain_overflow(self, empty_blockchain):
|
||||
blocks = bt.get_consecutive_blocks(5, force_overflow=True)
|
||||
for block in blocks:
|
||||
result, err, _ = await empty_blockchain.receive_block(block)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(block)
|
||||
assert err is None
|
||||
assert result == ReceiveBlockResult.NEW_PEAK
|
||||
assert empty_blockchain.get_peak().height == len(blocks) - 1
|
||||
@ -360,7 +360,7 @@ class TestBlockHeaderValidation:
|
||||
blocks = []
|
||||
for i in range(num_blocks):
|
||||
blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, skip_slots=2, force_overflow=True)
|
||||
result, err, _ = await blockchain.receive_block(blocks[-1])
|
||||
result, err, _, _ = await blockchain.receive_block(blocks[-1])
|
||||
assert err is None
|
||||
assert result == ReceiveBlockResult.NEW_PEAK
|
||||
assert blockchain.get_peak().height == num_blocks - 1
|
||||
@ -372,7 +372,7 @@ class TestBlockHeaderValidation:
|
||||
assert (await empty_blockchain.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
|
||||
block_1_bad = recursive_replace(blocks[-1], "foliage.prev_block_hash", bytes([0] * 32))
|
||||
|
||||
result, err, _ = await empty_blockchain.receive_block(block_1_bad)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(block_1_bad)
|
||||
assert result == ReceiveBlockResult.DISCONNECTED_BLOCK
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@ -382,7 +382,7 @@ class TestBlockHeaderValidation:
|
||||
assert (await empty_blockchain.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
|
||||
block_1_bad = recursive_replace(blocks[-1], "reward_chain_block.proof_of_space.proof", bytes([0] * 32))
|
||||
|
||||
result, err, _ = await empty_blockchain.receive_block(block_1_bad)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(block_1_bad)
|
||||
assert result == ReceiveBlockResult.INVALID_BLOCK
|
||||
assert err == Err.INVALID_POSPACE
|
||||
|
||||
@ -399,7 +399,7 @@ class TestBlockHeaderValidation:
|
||||
blocks[0], "finished_sub_slots", [new_finished_ss] + blocks[0].finished_sub_slots[1:]
|
||||
)
|
||||
|
||||
result, err, _ = await empty_blockchain.receive_block(block_0_bad)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(block_0_bad)
|
||||
assert result == ReceiveBlockResult.INVALID_BLOCK
|
||||
assert err == Err.INVALID_PREV_CHALLENGE_SLOT_HASH
|
||||
|
||||
@ -417,8 +417,8 @@ class TestBlockHeaderValidation:
|
||||
blocks[1], "finished_sub_slots", [new_finished_ss] + blocks[1].finished_sub_slots[1:]
|
||||
)
|
||||
|
||||
_, _, _ = await empty_blockchain.receive_block(blocks[0])
|
||||
result, err, _ = await empty_blockchain.receive_block(block_1_bad)
|
||||
_, _, _, _ = await empty_blockchain.receive_block(blocks[0])
|
||||
result, err, _, _ = await empty_blockchain.receive_block(block_1_bad)
|
||||
assert result == ReceiveBlockResult.INVALID_BLOCK
|
||||
assert err == Err.INVALID_PREV_CHALLENGE_SLOT_HASH
|
||||
|
||||
@ -436,8 +436,8 @@ class TestBlockHeaderValidation:
|
||||
blocks[1], "finished_sub_slots", blocks[1].finished_sub_slots[:-1] + [new_finished_ss]
|
||||
)
|
||||
|
||||
_, _, _ = await empty_blockchain.receive_block(blocks[0])
|
||||
result, err, _ = await empty_blockchain.receive_block(block_1_bad)
|
||||
_, _, _, _ = await empty_blockchain.receive_block(blocks[0])
|
||||
result, err, _, _ = await empty_blockchain.receive_block(block_1_bad)
|
||||
assert result == ReceiveBlockResult.INVALID_BLOCK
|
||||
assert err == Err.INVALID_PREV_CHALLENGE_SLOT_HASH
|
||||
|
||||
@ -460,7 +460,7 @@ class TestBlockHeaderValidation:
|
||||
blocks[0], "finished_sub_slots", [new_finished_ss] + blocks[0].finished_sub_slots[1:]
|
||||
)
|
||||
|
||||
result, err, _ = await empty_blockchain.receive_block(block_0_bad)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(block_0_bad)
|
||||
assert result == ReceiveBlockResult.INVALID_BLOCK
|
||||
assert err == Err.SHOULD_NOT_HAVE_ICC
|
||||
|
||||
@ -489,7 +489,7 @@ class TestBlockHeaderValidation:
|
||||
block_bad = recursive_replace(
|
||||
block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss]
|
||||
)
|
||||
result, err, _ = await bc1.receive_block(block_bad)
|
||||
result, err, _, _ = await bc1.receive_block(block_bad)
|
||||
assert err == Err.INVALID_ICC_EOS_VDF
|
||||
|
||||
# Bad output
|
||||
@ -509,7 +509,7 @@ class TestBlockHeaderValidation:
|
||||
block_bad_2 = recursive_replace(
|
||||
block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss_2]
|
||||
)
|
||||
result, err, _ = await bc1.receive_block(block_bad_2)
|
||||
result, err, _, _ = await bc1.receive_block(block_bad_2)
|
||||
assert err == Err.INVALID_ICC_EOS_VDF
|
||||
|
||||
# Bad challenge hash
|
||||
@ -528,7 +528,7 @@ class TestBlockHeaderValidation:
|
||||
block_bad_3 = recursive_replace(
|
||||
block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss_3]
|
||||
)
|
||||
result, err, _ = await bc1.receive_block(block_bad_3)
|
||||
result, err, _, _ = await bc1.receive_block(block_bad_3)
|
||||
assert err == Err.INVALID_ICC_EOS_VDF
|
||||
|
||||
# Bad proof
|
||||
@ -540,10 +540,10 @@ class TestBlockHeaderValidation:
|
||||
block_bad_5 = recursive_replace(
|
||||
block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss_5]
|
||||
)
|
||||
result, err, _ = await bc1.receive_block(block_bad_5)
|
||||
result, err, _, _ = await bc1.receive_block(block_bad_5)
|
||||
assert err == Err.INVALID_ICC_EOS_VDF
|
||||
|
||||
result, err, _ = await bc1.receive_block(block)
|
||||
result, err, _, _ = await bc1.receive_block(block)
|
||||
assert err is None
|
||||
assert result == ReceiveBlockResult.NEW_PEAK
|
||||
|
||||
@ -593,7 +593,7 @@ class TestBlockHeaderValidation:
|
||||
block_bad = recursive_replace(
|
||||
block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss]
|
||||
)
|
||||
result, err, _ = await blockchain.receive_block(block_bad)
|
||||
result, err, _, _ = await blockchain.receive_block(block_bad)
|
||||
assert err == Err.INVALID_ICC_HASH_CC
|
||||
|
||||
# 2i
|
||||
@ -605,7 +605,7 @@ class TestBlockHeaderValidation:
|
||||
block_bad = recursive_replace(
|
||||
block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss_bad_rc]
|
||||
)
|
||||
result, err, _ = await blockchain.receive_block(block_bad)
|
||||
result, err, _, _ = await blockchain.receive_block(block_bad)
|
||||
assert err == Err.INVALID_ICC_HASH_RC
|
||||
elif len(block.finished_sub_slots) > 0 and block.finished_sub_slots[-1].infused_challenge_chain is None:
|
||||
# 2j
|
||||
@ -620,7 +620,7 @@ class TestBlockHeaderValidation:
|
||||
block_bad = recursive_replace(
|
||||
block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss_bad_cc]
|
||||
)
|
||||
result, err, _ = await blockchain.receive_block(block_bad)
|
||||
result, err, _, _ = await blockchain.receive_block(block_bad)
|
||||
assert err == Err.INVALID_ICC_HASH_CC
|
||||
|
||||
# 2k
|
||||
@ -634,11 +634,11 @@ class TestBlockHeaderValidation:
|
||||
block_bad = recursive_replace(
|
||||
block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss_bad_rc]
|
||||
)
|
||||
result, err, _ = await blockchain.receive_block(block_bad)
|
||||
result, err, _, _ = await blockchain.receive_block(block_bad)
|
||||
assert err == Err.INVALID_ICC_HASH_RC
|
||||
|
||||
# Finally, add the block properly
|
||||
result, err, _ = await blockchain.receive_block(block)
|
||||
result, err, _, _ = await blockchain.receive_block(block)
|
||||
assert err is None
|
||||
assert result == ReceiveBlockResult.NEW_PEAK
|
||||
|
||||
@ -658,7 +658,7 @@ class TestBlockHeaderValidation:
|
||||
block_bad = recursive_replace(
|
||||
blocks[-1], "finished_sub_slots", blocks[-1].finished_sub_slots[:-1] + [new_finished_ss]
|
||||
)
|
||||
result, err, _ = await blockchain.receive_block(block_bad)
|
||||
result, err, _, _ = await blockchain.receive_block(block_bad)
|
||||
assert err == Err.INVALID_SUB_EPOCH_SUMMARY_HASH
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@ -672,11 +672,11 @@ class TestBlockHeaderValidation:
|
||||
blocks_3 = bt.get_consecutive_blocks(1, skip_slots=2, block_list_input=blocks_base, force_overflow=True)
|
||||
blocks_4 = bt.get_consecutive_blocks(1, block_list_input=blocks_base)
|
||||
for block in blocks_base:
|
||||
result, err, _ = await empty_blockchain.receive_block(block)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(block)
|
||||
assert err is None
|
||||
assert result == ReceiveBlockResult.NEW_PEAK
|
||||
for block in [blocks_1[-1], blocks_2[-1], blocks_3[-1], blocks_4[-1]]:
|
||||
result, err, _ = await empty_blockchain.receive_block(block)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(block)
|
||||
assert err is None
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@ -696,7 +696,7 @@ class TestBlockHeaderValidation:
|
||||
blocks[-1], "finished_sub_slots", blocks[-1].finished_sub_slots[:-1] + [new_finished_ss]
|
||||
)
|
||||
|
||||
result, err, _ = await blockchain.receive_block(block_1_bad)
|
||||
result, err, _, _ = await blockchain.receive_block(block_1_bad)
|
||||
assert result == ReceiveBlockResult.INVALID_BLOCK
|
||||
assert err == Err.INVALID_CHALLENGE_SLOT_HASH_RC
|
||||
|
||||
@ -725,7 +725,7 @@ class TestBlockHeaderValidation:
|
||||
block_bad = recursive_replace(
|
||||
block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss]
|
||||
)
|
||||
result, err, _ = await empty_blockchain.receive_block(block_bad)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(block_bad)
|
||||
assert err == Err.INVALID_CC_EOS_VDF
|
||||
|
||||
# Bad output
|
||||
@ -747,7 +747,7 @@ class TestBlockHeaderValidation:
|
||||
block_bad_2 = recursive_replace(
|
||||
block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss_2]
|
||||
)
|
||||
result, err, _ = await empty_blockchain.receive_block(block_bad_2)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(block_bad_2)
|
||||
assert err == Err.INVALID_CC_EOS_VDF
|
||||
|
||||
# Bad challenge hash
|
||||
@ -769,7 +769,7 @@ class TestBlockHeaderValidation:
|
||||
block_bad_3 = recursive_replace(
|
||||
block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss_3]
|
||||
)
|
||||
result, err, _ = await empty_blockchain.receive_block(block_bad_3)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(block_bad_3)
|
||||
assert err == Err.INVALID_CC_EOS_VDF or err == Err.INVALID_PREV_CHALLENGE_SLOT_HASH
|
||||
|
||||
# Bad proof
|
||||
@ -781,10 +781,10 @@ class TestBlockHeaderValidation:
|
||||
block_bad_5 = recursive_replace(
|
||||
block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss_5]
|
||||
)
|
||||
result, err, _ = await empty_blockchain.receive_block(block_bad_5)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(block_bad_5)
|
||||
assert err == Err.INVALID_CC_EOS_VDF
|
||||
|
||||
result, err, _ = await empty_blockchain.receive_block(block)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(block)
|
||||
assert err is None
|
||||
assert result == ReceiveBlockResult.NEW_PEAK
|
||||
|
||||
@ -807,7 +807,7 @@ class TestBlockHeaderValidation:
|
||||
block_bad = recursive_replace(
|
||||
block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss]
|
||||
)
|
||||
result, err, _ = await empty_blockchain.receive_block(block_bad)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(block_bad)
|
||||
assert err == Err.INVALID_RC_EOS_VDF
|
||||
|
||||
# Bad output
|
||||
@ -823,7 +823,7 @@ class TestBlockHeaderValidation:
|
||||
block_bad_2 = recursive_replace(
|
||||
block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss_2]
|
||||
)
|
||||
result, err, _ = await empty_blockchain.receive_block(block_bad_2)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(block_bad_2)
|
||||
assert err == Err.INVALID_RC_EOS_VDF
|
||||
|
||||
# Bad challenge hash
|
||||
@ -839,7 +839,7 @@ class TestBlockHeaderValidation:
|
||||
block_bad_3 = recursive_replace(
|
||||
block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss_3]
|
||||
)
|
||||
result, err, _ = await empty_blockchain.receive_block(block_bad_3)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(block_bad_3)
|
||||
assert err == Err.INVALID_RC_EOS_VDF
|
||||
|
||||
# Bad proof
|
||||
@ -851,10 +851,10 @@ class TestBlockHeaderValidation:
|
||||
block_bad_5 = recursive_replace(
|
||||
block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss_5]
|
||||
)
|
||||
result, err, _ = await empty_blockchain.receive_block(block_bad_5)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(block_bad_5)
|
||||
assert err == Err.INVALID_RC_EOS_VDF
|
||||
|
||||
result, err, _ = await empty_blockchain.receive_block(block)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(block)
|
||||
assert err is None
|
||||
assert result == ReceiveBlockResult.NEW_PEAK
|
||||
|
||||
@ -872,7 +872,7 @@ class TestBlockHeaderValidation:
|
||||
),
|
||||
)
|
||||
block_bad = recursive_replace(block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss])
|
||||
result, err, _ = await empty_blockchain.receive_block(block_bad)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(block_bad)
|
||||
assert err == Err.INVALID_DEFICIT
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@ -903,10 +903,10 @@ class TestBlockHeaderValidation:
|
||||
block_bad = recursive_replace(
|
||||
blocks[-1], "finished_sub_slots", blocks[-1].finished_sub_slots[:-1] + [new_finished_ss]
|
||||
)
|
||||
result, err, _ = await empty_blockchain.receive_block(block_bad)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(block_bad)
|
||||
assert err == Err.INVALID_DEFICIT or err == Err.INVALID_ICC_HASH_CC
|
||||
|
||||
result, err, _ = await empty_blockchain.receive_block(blocks[-1])
|
||||
result, err, _, _ = await empty_blockchain.receive_block(blocks[-1])
|
||||
assert result == ReceiveBlockResult.NEW_PEAK
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@ -931,7 +931,7 @@ class TestBlockHeaderValidation:
|
||||
),
|
||||
)
|
||||
block_bad = recursive_replace(block, "finished_sub_slots", [new_finished_ss] + block.finished_sub_slots[1:])
|
||||
result, err, _ = await empty_blockchain.receive_block(block_bad)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(block_bad)
|
||||
assert err == Err.INVALID_SUB_EPOCH_SUMMARY_HASH
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@ -964,7 +964,7 @@ class TestBlockHeaderValidation:
|
||||
block_bad = recursive_replace(
|
||||
blocks[-1], "finished_sub_slots", [new_finished_ss] + blocks[-1].finished_sub_slots[1:]
|
||||
)
|
||||
result, err, _ = await empty_blockchain.receive_block(block_bad)
|
||||
result, err, _, _ = await empty_blockchain.receive_block(block_bad)
|
||||
assert err == Err.INVALID_SUB_EPOCH_SUMMARY_HASH
|
||||
return None
|
||||
await empty_blockchain.receive_block(blocks[-1])
|
||||
@ -1592,7 +1592,7 @@ class TestPreValidation:
|
||||
assert res[n].error is None
|
||||
block = blocks_to_validate[n]
|
||||
start_rb = time.time()
|
||||
result, err, _ = await empty_blockchain.receive_block(block, res[n])
|
||||
result, err, _, _ = await empty_blockchain.receive_block(block, res[n])
|
||||
end_rb = time.time()
|
||||
times_rb.append(end_rb - start_rb)
|
||||
assert err is None
|
||||
@ -1658,7 +1658,7 @@ class TestBodyValidation:
             transaction_data=bundles,
             time_per_block=10,
         )
-        assert (await b.receive_block(blocks[-1])) == expected
+        assert (await b.receive_block(blocks[-1]))[0:-1] == expected
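Because receive_block now returns four values, a test that compares the whole return value against a pre-built (result, error, fork_height) tuple has to drop the new last element first, which is what the [0:-1] slice above does. A small illustrative sketch; the helper and the expected triple are hypothetical:

async def check_expected(b, block, expected):
    # expected is a (result, error, fork_height) triple supplied by the test;
    # slicing off the last element discards the new coin_changes value before comparing.
    ret = await b.receive_block(block)
    assert ret[0:-1] == expected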
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.parametrize(
|
||||
@ -2437,7 +2437,7 @@ class TestBodyValidation:
|
||||
farmer_reward_puzzle_hash=rewards_ph,
|
||||
)
|
||||
for block in blocks_reorg[-10:]:
|
||||
r, e, _ = await b.receive_block(block)
|
||||
r, e, _, _ = await b.receive_block(block)
|
||||
assert e is None
|
||||
|
||||
# ephemeral coin is spent
|
||||
@ -2556,7 +2556,7 @@ class TestReorgs:
|
||||
|
||||
blocks_reorg_chain = bt.get_consecutive_blocks(7, blocks[:10], seed=b"2")
|
||||
for reorg_block in blocks_reorg_chain:
|
||||
result, error_code, fork_height = await b.receive_block(reorg_block)
|
||||
result, error_code, fork_height, _ = await b.receive_block(reorg_block)
|
||||
if reorg_block.height < 10:
|
||||
assert result == ReceiveBlockResult.ALREADY_HAVE_BLOCK
|
||||
elif reorg_block.height < 14:
|
||||
@ -2594,7 +2594,7 @@ class TestReorgs:
|
||||
)
|
||||
found_orphan = False
|
||||
for reorg_block in blocks_reorg_chain:
|
||||
result, error_code, fork_height = await b.receive_block(reorg_block)
|
||||
result, error_code, fork_height, _ = await b.receive_block(reorg_block)
|
||||
if reorg_block.height < num_blocks_chain_2_start:
|
||||
assert result == ReceiveBlockResult.ALREADY_HAVE_BLOCK
|
||||
if reorg_block.weight <= chain_1_weight:
|
||||
@ -2634,7 +2634,7 @@ class TestReorgs:
|
||||
found_orphan = False
|
||||
blocks_reorg_chain = bt.get_consecutive_blocks(16, [], seed=b"2")
|
||||
for reorg_block in blocks_reorg_chain:
|
||||
result, error_code, fork_height = await b.receive_block(reorg_block)
|
||||
result, error_code, fork_height, _ = await b.receive_block(reorg_block)
|
||||
if reorg_block.height < 14:
|
||||
if result == ReceiveBlockResult.ADDED_AS_ORPHAN:
|
||||
found_orphan = True
|
||||
@ -2646,13 +2646,13 @@ class TestReorgs:
|
||||
# Back to original chain
|
||||
blocks_reorg_chain_2 = bt.get_consecutive_blocks(3, blocks, seed=b"3")
|
||||
|
||||
result, error_code, fork_height = await b.receive_block(blocks_reorg_chain_2[-3])
|
||||
result, error_code, fork_height, _ = await b.receive_block(blocks_reorg_chain_2[-3])
|
||||
assert result == ReceiveBlockResult.ADDED_AS_ORPHAN
|
||||
|
||||
result, error_code, fork_height = await b.receive_block(blocks_reorg_chain_2[-2])
|
||||
result, error_code, fork_height, _ = await b.receive_block(blocks_reorg_chain_2[-2])
|
||||
assert result == ReceiveBlockResult.NEW_PEAK
|
||||
|
||||
result, error_code, fork_height = await b.receive_block(blocks_reorg_chain_2[-1])
|
||||
result, error_code, fork_height, _ = await b.receive_block(blocks_reorg_chain_2[-1])
|
||||
assert result == ReceiveBlockResult.NEW_PEAK
|
||||
assert found_orphan
|
||||
assert b.get_peak().height == 17
|
||||
@ -2697,11 +2697,11 @@ class TestReorgs:
|
||||
seed=b"1245",
|
||||
)
|
||||
for block in blocks:
|
||||
result, error_code, _ = await b.receive_block(block)
|
||||
result, error_code, _, _ = await b.receive_block(block)
|
||||
assert error_code is None and result == ReceiveBlockResult.NEW_PEAK
|
||||
|
||||
for block in blocks_fork:
|
||||
result, error_code, _ = await b.receive_block(block)
|
||||
result, error_code, _, _ = await b.receive_block(block)
|
||||
assert error_code is None
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@ -2744,7 +2744,7 @@ class TestReorgs:
|
||||
heights = []
|
||||
for block in default_1000_blocks[:200]:
|
||||
heights.append(block.height)
|
||||
result, error_code, _ = await b.receive_block(block)
|
||||
result, error_code, _, _ = await b.receive_block(block)
|
||||
assert error_code is None and result == ReceiveBlockResult.NEW_PEAK
|
||||
|
||||
blocks = await b.get_block_records_at(heights, batch_size=2)
|
||||
|
@ -134,7 +134,7 @@ class TestBlockchainTransactions:
|
||||
)
|
||||
|
||||
next_block = new_blocks[-1]
|
||||
res, err, _ = await full_node_1.blockchain.receive_block(next_block)
|
||||
res, err, _, _ = await full_node_1.blockchain.receive_block(next_block)
|
||||
assert res == ReceiveBlockResult.INVALID_BLOCK
|
||||
assert err == Err.DOUBLE_SPEND
|
||||
|
||||
@ -174,7 +174,7 @@ class TestBlockchainTransactions:
|
||||
)
|
||||
|
||||
next_block = new_blocks[-1]
|
||||
res, err, _ = await full_node_1.blockchain.receive_block(next_block)
|
||||
res, err, _, _ = await full_node_1.blockchain.receive_block(next_block)
|
||||
assert res == ReceiveBlockResult.INVALID_BLOCK
|
||||
assert err == Err.DUPLICATE_OUTPUT
|
||||
|
||||
@ -233,7 +233,7 @@ class TestBlockchainTransactions:
|
||||
transaction_data=spend_bundle,
|
||||
)
|
||||
|
||||
res, err, _ = await full_node_api_1.full_node.blockchain.receive_block(new_blocks[-1])
|
||||
res, err, _, _ = await full_node_api_1.full_node.blockchain.receive_block(new_blocks[-1])
|
||||
assert err is None
|
||||
assert res == ReceiveBlockResult.NEW_PEAK
|
||||
|
||||
@ -246,7 +246,7 @@ class TestBlockchainTransactions:
|
||||
transaction_data=spend_bundle,
|
||||
)
|
||||
|
||||
res, err, _ = await full_node_api_1.full_node.blockchain.receive_block(new_blocks_double[-1])
|
||||
res, err, _, _ = await full_node_api_1.full_node.blockchain.receive_block(new_blocks_double[-1])
|
||||
assert err is Err.DOUBLE_SPEND
|
||||
assert res == ReceiveBlockResult.INVALID_BLOCK
|
||||
|
||||
@ -288,8 +288,7 @@ class TestBlockchainTransactions:
|
||||
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.parametrize("rust_checker", [True, False])
|
||||
async def test_validate_blockchain_spend_reorg_coin(self, two_nodes, rust_checker: bool):
|
||||
async def test_validate_blockchain_spend_reorg_coin(self, two_nodes):
|
||||
num_blocks = 10
|
||||
wallet_a = WALLET_A
|
||||
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
|
||||
@ -328,7 +327,7 @@ class TestBlockchainTransactions:
|
||||
|
||||
coin_2 = None
|
||||
for coin in run_and_get_removals_and_additions(
|
||||
new_blocks[-1], test_constants.MAX_BLOCK_COST_CLVM, test_constants.COST_PER_BYTE, rust_checker=rust_checker
|
||||
new_blocks[-1], test_constants.MAX_BLOCK_COST_CLVM, test_constants.COST_PER_BYTE
|
||||
)[1]:
|
||||
if coin.puzzle_hash == receiver_1_puzzlehash:
|
||||
coin_2 = coin
|
||||
@ -349,7 +348,7 @@ class TestBlockchainTransactions:
|
||||
|
||||
coin_3 = None
|
||||
for coin in run_and_get_removals_and_additions(
|
||||
new_blocks[-1], test_constants.MAX_BLOCK_COST_CLVM, test_constants.COST_PER_BYTE, rust_checker=rust_checker
|
||||
new_blocks[-1], test_constants.MAX_BLOCK_COST_CLVM, test_constants.COST_PER_BYTE
|
||||
)[1]:
|
||||
if coin.puzzle_hash == receiver_2_puzzlehash:
|
||||
coin_3 = coin
|
||||
@ -513,7 +512,7 @@ class TestBlockchainTransactions:
|
||||
)
|
||||
|
||||
# Try to validate that block
|
||||
res, err, _ = await full_node_1.blockchain.receive_block(invalid_new_blocks[-1])
|
||||
res, err, _, _ = await full_node_1.blockchain.receive_block(invalid_new_blocks[-1])
|
||||
assert res == ReceiveBlockResult.INVALID_BLOCK
|
||||
assert err == Err.ASSERT_MY_COIN_ID_FAILED
|
||||
|
||||
@ -526,7 +525,7 @@ class TestBlockchainTransactions:
|
||||
transaction_data=valid_spend_bundle,
|
||||
guarantee_transaction_block=True,
|
||||
)
|
||||
res, err, _ = await full_node_1.blockchain.receive_block(new_blocks[-1])
|
||||
res, err, _, _ = await full_node_1.blockchain.receive_block(new_blocks[-1])
|
||||
assert res == ReceiveBlockResult.NEW_PEAK
|
||||
assert err is None
|
||||
|
||||
@ -593,7 +592,7 @@ class TestBlockchainTransactions:
|
||||
)
|
||||
|
||||
# Try to validate that block
|
||||
res, err, _ = await full_node_1.blockchain.receive_block(invalid_new_blocks[-1])
|
||||
res, err, _, _ = await full_node_1.blockchain.receive_block(invalid_new_blocks[-1])
|
||||
assert res == ReceiveBlockResult.INVALID_BLOCK
|
||||
assert err == Err.ASSERT_ANNOUNCE_CONSUMED_FAILED
|
||||
|
||||
@ -610,7 +609,7 @@ class TestBlockchainTransactions:
|
||||
)
|
||||
|
||||
# Try to validate newly created block
|
||||
res, err, _ = await full_node_1.blockchain.receive_block(new_blocks[-1])
|
||||
res, err, _, _ = await full_node_1.blockchain.receive_block(new_blocks[-1])
|
||||
assert res == ReceiveBlockResult.NEW_PEAK
|
||||
assert err is None
|
||||
|
||||
@ -677,7 +676,7 @@ class TestBlockchainTransactions:
|
||||
)
|
||||
|
||||
# Try to validate that block
|
||||
res, err, _ = await full_node_1.blockchain.receive_block(invalid_new_blocks[-1])
|
||||
res, err, _, _ = await full_node_1.blockchain.receive_block(invalid_new_blocks[-1])
|
||||
assert res == ReceiveBlockResult.INVALID_BLOCK
|
||||
assert err == Err.ASSERT_ANNOUNCE_CONSUMED_FAILED
|
||||
|
||||
@ -694,7 +693,7 @@ class TestBlockchainTransactions:
|
||||
)
|
||||
|
||||
# Try to validate newly created block
|
||||
res, err, _ = await full_node_1.blockchain.receive_block(new_blocks[-1])
|
||||
res, err, _, _ = await full_node_1.blockchain.receive_block(new_blocks[-1])
|
||||
assert res == ReceiveBlockResult.NEW_PEAK
|
||||
assert err is None
|
||||
|
||||
@ -740,7 +739,7 @@ class TestBlockchainTransactions:
|
||||
)
|
||||
|
||||
# Try to validate that block at index 10
|
||||
res, err, _ = await full_node_1.blockchain.receive_block(invalid_new_blocks[-1])
|
||||
res, err, _, _ = await full_node_1.blockchain.receive_block(invalid_new_blocks[-1])
|
||||
assert res == ReceiveBlockResult.INVALID_BLOCK
|
||||
assert err == Err.ASSERT_HEIGHT_ABSOLUTE_FAILED
|
||||
|
||||
@ -750,7 +749,7 @@ class TestBlockchainTransactions:
|
||||
farmer_reward_puzzle_hash=coinbase_puzzlehash,
|
||||
guarantee_transaction_block=True,
|
||||
)
|
||||
res, _, _ = await full_node_1.blockchain.receive_block(new_blocks[-1])
|
||||
res, _, _, _ = await full_node_1.blockchain.receive_block(new_blocks[-1])
|
||||
assert res == ReceiveBlockResult.NEW_PEAK
|
||||
|
||||
# At index 11, it can be spent
|
||||
@ -761,7 +760,7 @@ class TestBlockchainTransactions:
|
||||
transaction_data=block1_spend_bundle,
|
||||
guarantee_transaction_block=True,
|
||||
)
|
||||
res, err, _ = await full_node_1.blockchain.receive_block(new_blocks[-1])
|
||||
res, err, _, _ = await full_node_1.blockchain.receive_block(new_blocks[-1])
|
||||
assert err is None
|
||||
assert res == ReceiveBlockResult.NEW_PEAK
|
||||
|
||||
@ -809,7 +808,7 @@ class TestBlockchainTransactions:
|
||||
)
|
||||
|
||||
# Try to validate that block at index 11
|
||||
res, err, _ = await full_node_1.blockchain.receive_block(invalid_new_blocks[-1])
|
||||
res, err, _, _ = await full_node_1.blockchain.receive_block(invalid_new_blocks[-1])
|
||||
assert res == ReceiveBlockResult.INVALID_BLOCK
|
||||
assert err == Err.ASSERT_HEIGHT_RELATIVE_FAILED
|
||||
|
||||
@ -819,7 +818,7 @@ class TestBlockchainTransactions:
|
||||
farmer_reward_puzzle_hash=coinbase_puzzlehash,
|
||||
guarantee_transaction_block=True,
|
||||
)
|
||||
res, _, _ = await full_node_1.blockchain.receive_block(new_blocks[-1])
|
||||
res, _, _, _ = await full_node_1.blockchain.receive_block(new_blocks[-1])
|
||||
assert res == ReceiveBlockResult.NEW_PEAK
|
||||
|
||||
# At index 12, it can be spent
|
||||
@ -830,7 +829,7 @@ class TestBlockchainTransactions:
|
||||
transaction_data=block1_spend_bundle,
|
||||
guarantee_transaction_block=True,
|
||||
)
|
||||
res, err, _ = await full_node_1.blockchain.receive_block(new_blocks[-1])
|
||||
res, err, _, _ = await full_node_1.blockchain.receive_block(new_blocks[-1])
|
||||
assert err is None
|
||||
assert res == ReceiveBlockResult.NEW_PEAK
|
||||
|
||||
@ -878,7 +877,7 @@ class TestBlockchainTransactions:
|
||||
)
|
||||
|
||||
# Try to validate that block before 300 sec
|
||||
res, err, _ = await full_node_1.blockchain.receive_block(invalid_new_blocks[-1])
|
||||
res, err, _, _ = await full_node_1.blockchain.receive_block(invalid_new_blocks[-1])
|
||||
assert res == ReceiveBlockResult.INVALID_BLOCK
|
||||
assert err == Err.ASSERT_SECONDS_RELATIVE_FAILED
|
||||
|
||||
@ -890,7 +889,7 @@ class TestBlockchainTransactions:
|
||||
guarantee_transaction_block=True,
|
||||
time_per_block=301,
|
||||
)
|
||||
res, err, _ = await full_node_1.blockchain.receive_block(valid_new_blocks[-1])
|
||||
res, err, _, _ = await full_node_1.blockchain.receive_block(valid_new_blocks[-1])
|
||||
assert err is None
|
||||
assert res == ReceiveBlockResult.NEW_PEAK
|
||||
|
||||
@ -939,7 +938,7 @@ class TestBlockchainTransactions:
|
||||
)
|
||||
|
||||
# Try to validate that block before 30 sec
|
||||
res, err, _ = await full_node_1.blockchain.receive_block(invalid_new_blocks[-1])
|
||||
res, err, _, _ = await full_node_1.blockchain.receive_block(invalid_new_blocks[-1])
|
||||
assert res == ReceiveBlockResult.INVALID_BLOCK
|
||||
assert err == Err.ASSERT_SECONDS_ABSOLUTE_FAILED
|
||||
|
||||
@ -951,7 +950,7 @@ class TestBlockchainTransactions:
|
||||
guarantee_transaction_block=True,
|
||||
time_per_block=31,
|
||||
)
|
||||
res, err, _ = await full_node_1.blockchain.receive_block(valid_new_blocks[-1])
|
||||
res, err, _, _ = await full_node_1.blockchain.receive_block(valid_new_blocks[-1])
|
||||
assert err is None
|
||||
assert res == ReceiveBlockResult.NEW_PEAK
|
||||
|
||||
@ -1001,7 +1000,7 @@ class TestBlockchainTransactions:
|
||||
guarantee_transaction_block=True,
|
||||
)
|
||||
|
||||
res, err, _ = await full_node_1.blockchain.receive_block(invalid_new_blocks[-1])
|
||||
res, err, _, _ = await full_node_1.blockchain.receive_block(invalid_new_blocks[-1])
|
||||
assert res == ReceiveBlockResult.INVALID_BLOCK
|
||||
assert err == Err.RESERVE_FEE_CONDITION_FAILED
|
||||
|
||||
@ -1012,6 +1011,6 @@ class TestBlockchainTransactions:
|
||||
transaction_data=block1_spend_bundle_good,
|
||||
guarantee_transaction_block=True,
|
||||
)
|
||||
res, err, _ = await full_node_1.blockchain.receive_block(valid_new_blocks[-1])
|
||||
res, err, _, _ = await full_node_1.blockchain.receive_block(valid_new_blocks[-1])
|
||||
assert err is None
|
||||
assert res == ReceiveBlockResult.NEW_PEAK
|
||||
|
@ -1,18 +1,17 @@
|
||||
from collections import defaultdict
|
||||
from dataclasses import dataclass, replace
|
||||
from typing import Dict, Iterator, Optional, Set
|
||||
from typing import Dict, Iterator, Optional
|
||||
|
||||
from chia.full_node.mempool_check_conditions import mempool_check_conditions_dict # noqa
|
||||
from chia.util.condition_tools import created_outputs_for_conditions_dict
|
||||
from chia.full_node.mempool_check_conditions import mempool_check_conditions_dict, get_name_puzzle_conditions
|
||||
from chia.types.blockchain_format.coin import Coin
|
||||
from chia.types.blockchain_format.sized_bytes import bytes32
|
||||
from chia.types.coin_record import CoinRecord
|
||||
from chia.types.spend_bundle import SpendBundle
|
||||
from chia.util.condition_tools import (
|
||||
conditions_dict_for_solution,
|
||||
coin_announcement_names_for_conditions_dict,
|
||||
puzzle_announcement_names_for_conditions_dict,
|
||||
)
|
||||
from chia.util.ints import uint32, uint64
|
||||
from chia.full_node.bundle_tools import simple_solution_generator
|
||||
from chia.util.errors import Err
|
||||
from chia.consensus.cost_calculator import NPCResult
|
||||
|
||||
|
||||
MAX_COST = 11000000000
|
||||
@ -60,62 +59,42 @@ class CoinStore:
|
||||
spend_bundle: SpendBundle,
|
||||
now: CoinTimestamp,
|
||||
max_cost: int,
|
||||
cost_per_byte: int,
|
||||
) -> int:
|
||||
# this should use blockchain consensus code
|
||||
|
||||
coin_announcements: Set[bytes32] = set()
|
||||
puzzle_announcements: Set[bytes32] = set()
|
||||
|
||||
conditions_dicts = []
|
||||
for coin_spend in spend_bundle.coin_spends:
|
||||
assert isinstance(coin_spend.coin, Coin)
|
||||
err, conditions_dict, cost = conditions_dict_for_solution(
|
||||
coin_spend.puzzle_reveal, coin_spend.solution, max_cost
|
||||
)
|
||||
if conditions_dict is None:
|
||||
raise BadSpendBundleError(f"clvm validation failure {err}")
|
||||
conditions_dicts.append(conditions_dict)
|
||||
coin_announcements.update(
|
||||
coin_announcement_names_for_conditions_dict(
|
||||
conditions_dict,
|
||||
coin_spend.coin,
|
||||
)
|
||||
)
|
||||
puzzle_announcements.update(
|
||||
puzzle_announcement_names_for_conditions_dict(
|
||||
conditions_dict,
|
||||
coin_spend.coin,
|
||||
)
|
||||
)
|
||||
program = simple_solution_generator(spend_bundle)
|
||||
result: NPCResult = get_name_puzzle_conditions(program, max_cost, cost_per_byte=cost_per_byte, safe_mode=True)
|
||||
if result.error is not None:
|
||||
raise BadSpendBundleError(f"condition validation failure {Err(result.error)}")
|
||||
|
||||
ephemeral_db = dict(self._db)
|
||||
for coin in spend_bundle.additions():
|
||||
name = coin.name()
|
||||
ephemeral_db[name] = CoinRecord(
|
||||
coin,
|
||||
uint32(now.height),
|
||||
uint32(0),
|
||||
False,
|
||||
False,
|
||||
uint64(now.seconds),
|
||||
)
|
||||
for npc in result.npc_list:
|
||||
for coin in created_outputs_for_conditions_dict(npc.condition_dict, npc.coin_name):
|
||||
name = coin.name()
|
||||
ephemeral_db[name] = CoinRecord(
|
||||
coin,
|
||||
uint32(now.height),
|
||||
uint32(0),
|
||||
False,
|
||||
False,
|
||||
uint64(now.seconds),
|
||||
)
|
||||
|
||||
for coin_spend, conditions_dict in zip(spend_bundle.coin_spends, conditions_dicts): # noqa
|
||||
prev_transaction_block_height = now.height
|
||||
timestamp = now.seconds
|
||||
coin_record = ephemeral_db.get(coin_spend.coin.name())
|
||||
for npc in result.npc_list:
|
||||
prev_transaction_block_height = uint32(now.height)
|
||||
timestamp = uint64(now.seconds)
|
||||
coin_record = ephemeral_db.get(npc.coin_name)
|
||||
if coin_record is None:
|
||||
raise BadSpendBundleError(f"coin not found for id 0x{coin_spend.coin.name().hex()}") # noqa
|
||||
raise BadSpendBundleError(f"coin not found for id 0x{npc.coin_name.hex()}") # noqa
|
||||
err = mempool_check_conditions_dict(
|
||||
coin_record,
|
||||
coin_announcements,
|
||||
puzzle_announcements,
|
||||
conditions_dict,
|
||||
uint32(prev_transaction_block_height),
|
||||
uint64(timestamp),
|
||||
npc.condition_dict,
|
||||
prev_transaction_block_height,
|
||||
timestamp,
|
||||
)
|
||||
if err is not None:
|
||||
raise BadSpendBundleError(f"condition validation failure {err}")
|
||||
if err is not None:
|
||||
raise BadSpendBundleError(f"condition validation failure {Err(err)}")
|
||||
|
||||
return 0
|
||||
|
||||
@ -124,8 +103,9 @@ class CoinStore:
|
||||
spend_bundle: SpendBundle,
|
||||
now: CoinTimestamp,
|
||||
max_cost: int,
|
||||
cost_per_byte: int,
|
||||
):
|
||||
err = self.validate_spend_bundle(spend_bundle, now, max_cost)
|
||||
err = self.validate_spend_bundle(spend_bundle, now, max_cost, cost_per_byte)
|
||||
if err != 0:
|
||||
raise BadSpendBundleError(f"validation failure {err}")
|
||||
additions = spend_bundle.additions()
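The two hunks above rewrite CoinStore.validate_spend_bundle (and its caller) to validate the whole bundle through a block generator instead of walking each coin spend's conditions by hand. A condensed sketch of that flow, assuming only the helper signatures shown in the hunk; BadSpendBundleError is the exception class this file already uses:

from chia.consensus.cost_calculator import NPCResult
from chia.full_node.bundle_tools import simple_solution_generator
from chia.full_node.mempool_check_conditions import get_name_puzzle_conditions
from chia.util.errors import Err


def check_bundle_conditions(spend_bundle, max_cost: int, cost_per_byte: int) -> NPCResult:
    # Serialize the bundle into a generator program and run the condition
    # checker over it in safe mode, mirroring the new validate_spend_bundle.
    program = simple_solution_generator(spend_bundle)
    result: NPCResult = get_name_puzzle_conditions(program, max_cost, cost_per_byte=cost_per_byte, safe_mode=True)
    if result.error is not None:
        raise BadSpendBundleError(f"condition validation failure {Err(result.error)}")
    return result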
|
||||
|
@ -26,6 +26,7 @@ T1 = CoinTimestamp(1, 10000000)
|
||||
T2 = CoinTimestamp(5, 10003000)
|
||||
|
||||
MAX_BLOCK_COST_CLVM = int(1e18)
|
||||
COST_PER_BYTE = int(12000)
|
||||
|
||||
|
||||
def secret_exponent_for_index(index: int) -> int:
|
||||
@ -72,7 +73,7 @@ def do_test_spend(
     coin_spend = CoinSpend(coin, puzzle_reveal, solution)

     spend_bundle = SpendBundle([coin_spend], G2Element())
-    coin_db.update_coin_store_for_spend_bundle(spend_bundle, spend_time, MAX_BLOCK_COST_CLVM)
+    coin_db.update_coin_store_for_spend_bundle(spend_bundle, spend_time, MAX_BLOCK_COST_CLVM, COST_PER_BYTE)

     # ensure all outputs are there
     for puzzle_hash, amount in payments:
|
||||
|
@ -28,7 +28,9 @@ async def disconnect_all_and_reconnect(server: ChiaServer, reconnect_to: ChiaSer
     return await server.start_client(PeerInfo(self_hostname, uint16(reconnect_to._port)), None)


-async def add_dummy_connection(server: ChiaServer, dummy_port: int) -> Tuple[asyncio.Queue, bytes32]:
+async def add_dummy_connection(
+    server: ChiaServer, dummy_port: int, type: NodeType = NodeType.FULL_NODE
+) -> Tuple[asyncio.Queue, bytes32]:
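The helper above now takes the node type the dummy peer should advertise, defaulting to a full node. A usage sketch; the wallet case and the port number are assumed examples, and the NodeType import path reflects where chia-blockchain normally defines it:

from chia.server.outbound_message import NodeType


async def connect_dummy_wallet(server):
    # 12312 is an arbitrary example port; NodeType.WALLET exercises the new parameter.
    incoming_queue, node_id = await add_dummy_connection(server, 12312, type=NodeType.WALLET)
    return incoming_queue, node_id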
|
||||
timeout = aiohttp.ClientTimeout(total=10)
|
||||
session = aiohttp.ClientSession(timeout=timeout)
|
||||
incoming_queue: asyncio.Queue = asyncio.Queue()
|
||||
@ -46,7 +48,7 @@ async def add_dummy_connection(server: ChiaServer, dummy_port: int) -> Tuple[asy
|
||||
url = f"wss://{self_hostname}:{server._port}/ws"
|
||||
ws = await session.ws_connect(url, autoclose=True, autoping=True, ssl=ssl_context)
|
||||
wsc = WSChiaConnection(
|
||||
NodeType.FULL_NODE,
|
||||
type,
|
||||
ws,
|
||||
server._port,
|
||||
log,
|
||||
|
@ -34,12 +34,12 @@ async def create_blockchain(constants: ConsensusConstants):
     return bc1, connection, db_path


-@pytest.fixture(scope="function", params=[0, 10000000])
+@pytest.fixture(scope="function")
 async def empty_blockchain(request):
     """
     Provides a list of 10 valid blocks, as well as a blockchain with 9 blocks added to it.
     """
-    bc1, connection, db_path = await create_blockchain(test_constants.replace(RUST_CONDITION_CHECKER=request.param))
+    bc1, connection, db_path = await create_blockchain(test_constants)
     yield bc1

     await connection.close()
|
||||
|
@ -5,6 +5,7 @@ from typing import List, Optional, Set, Tuple
|
||||
|
||||
import aiosqlite
|
||||
import pytest
|
||||
import tempfile
|
||||
|
||||
from chia.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward
|
||||
from chia.consensus.blockchain import Blockchain, ReceiveBlockResult
|
||||
@ -21,6 +22,7 @@ from chia.util.ints import uint64, uint32
|
||||
from tests.wallet_tools import WalletTool
|
||||
from chia.util.db_wrapper import DBWrapper
|
||||
from tests.setup_nodes import bt, test_constants
|
||||
from chia.types.blockchain_format.sized_bytes import bytes32
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
@ -54,39 +56,47 @@ def get_future_reward_coins(block: FullBlock) -> Tuple[Coin, Coin]:
     return pool_coin, farmer_coin


-class TestCoinStore:
+class DBConnection:
+    async def __aenter__(self) -> DBWrapper:
+        self.db_path = Path(tempfile.NamedTemporaryFile().name)
+        if self.db_path.exists():
+            self.db_path.unlink()
+        self.connection = await aiosqlite.connect(self.db_path)
+        return DBWrapper(self.connection)
+
+    async def __aexit__(self, exc_t, exc_v, exc_tb):
+        await self.connection.close()
+        self.db_path.unlink()
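The DBConnection helper above replaces the hand-rolled create-temp-DB, close, and unlink blocks in these coin store tests. Its intended use, mirroring the updated tests that follow, looks like this; the surrounding function is only a sketch:

async def example_coin_store(cache_size: int = 0):
    # The context manager creates a temporary SQLite database, hands back a
    # DBWrapper, and closes the connection and removes the file on exit.
    async with DBConnection() as db_wrapper:
        coin_store = await CoinStore.create(db_wrapper, cache_size=uint32(cache_size))
        # ... exercise coin_store here ...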
|
||||
|
||||
|
||||
class TestCoinStoreWithBlocks:
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.parametrize("rust_checker", [True, False])
|
||||
async def test_basic_coin_store(self, rust_checker: bool):
|
||||
@pytest.mark.parametrize("cache_size", [0])
|
||||
async def test_basic_coin_store(self, cache_size: uint32):
|
||||
wallet_a = WALLET_A
|
||||
reward_ph = wallet_a.get_new_puzzlehash()
|
||||
|
||||
for cache_size in [0]:
|
||||
# Generate some coins
|
||||
blocks = bt.get_consecutive_blocks(
|
||||
10,
|
||||
[],
|
||||
farmer_reward_puzzle_hash=reward_ph,
|
||||
pool_reward_puzzle_hash=reward_ph,
|
||||
)
|
||||
# Generate some coins
|
||||
blocks = bt.get_consecutive_blocks(
|
||||
10,
|
||||
[],
|
||||
farmer_reward_puzzle_hash=reward_ph,
|
||||
pool_reward_puzzle_hash=reward_ph,
|
||||
)
|
||||
|
||||
coins_to_spend: List[Coin] = []
|
||||
for block in blocks:
|
||||
if block.is_transaction_block():
|
||||
for coin in block.get_included_reward_coins():
|
||||
if coin.puzzle_hash == reward_ph:
|
||||
coins_to_spend.append(coin)
|
||||
coins_to_spend: List[Coin] = []
|
||||
for block in blocks:
|
||||
if block.is_transaction_block():
|
||||
for coin in block.get_included_reward_coins():
|
||||
if coin.puzzle_hash == reward_ph:
|
||||
coins_to_spend.append(coin)
|
||||
|
||||
spend_bundle = wallet_a.generate_signed_transaction(
|
||||
uint64(1000), wallet_a.get_new_puzzlehash(), coins_to_spend[0]
|
||||
)
|
||||
spend_bundle = wallet_a.generate_signed_transaction(
|
||||
uint64(1000), wallet_a.get_new_puzzlehash(), coins_to_spend[0]
|
||||
)
|
||||
|
||||
db_path = Path("fndb_test.db")
|
||||
if db_path.exists():
|
||||
db_path.unlink()
|
||||
connection = await aiosqlite.connect(db_path)
|
||||
db_wrapper = DBWrapper(connection)
|
||||
coin_store = await CoinStore.create(db_wrapper, cache_size=uint32(cache_size))
|
||||
async with DBConnection() as db_wrapper:
|
||||
coin_store = await CoinStore.create(db_wrapper, cache_size=cache_size)
|
||||
|
||||
blocks = bt.get_consecutive_blocks(
|
||||
10,
|
||||
@ -111,7 +121,6 @@ class TestCoinStore:
|
||||
bt.constants.MAX_BLOCK_COST_CLVM,
|
||||
cost_per_byte=bt.constants.COST_PER_BYTE,
|
||||
safe_mode=False,
|
||||
rust_checker=rust_checker,
|
||||
)
|
||||
tx_removals, tx_additions = tx_removals_and_additions(npc_result.npc_list)
|
||||
else:
|
||||
@ -119,11 +128,25 @@ class TestCoinStore:
|
||||
|
||||
assert block.get_included_reward_coins() == should_be_included_prev
|
||||
|
||||
await coin_store.new_block(block, tx_additions, tx_removals)
|
||||
if block.is_transaction_block():
|
||||
assert block.foliage_transaction_block is not None
|
||||
await coin_store.new_block(
|
||||
block.height,
|
||||
block.foliage_transaction_block.timestamp,
|
||||
block.get_included_reward_coins(),
|
||||
tx_additions,
|
||||
tx_removals,
|
||||
)
|
||||
|
||||
if block.height != 0:
|
||||
with pytest.raises(Exception):
|
||||
await coin_store.new_block(block, tx_additions, tx_removals)
|
||||
if block.height != 0:
|
||||
with pytest.raises(Exception):
|
||||
await coin_store.new_block(
|
||||
block.height,
|
||||
block.foliage_transaction_block.timestamp,
|
||||
block.get_included_reward_coins(),
|
||||
tx_additions,
|
||||
tx_removals,
|
||||
)
|
||||
|
||||
for expected_coin in should_be_included_prev:
|
||||
# Check that the coinbase rewards are added
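The same hunk also switches to the new CoinStore.new_block signature, which now takes the block fields individually instead of the full block object. A hedged sketch of the call, using only the parameters visible in the updated call sites:

async def record_block(coin_store, block, tx_additions, tx_removals):
    # Only transaction blocks carry a foliage_transaction_block (and hence a timestamp).
    if block.is_transaction_block():
        assert block.foliage_transaction_block is not None
        await coin_store.new_block(
            block.height,
            block.foliage_transaction_block.timestamp,
            block.get_included_reward_coins(),
            tx_additions,
            tx_removals,
        )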
|
||||
@ -144,26 +167,30 @@ class TestCoinStore:
|
||||
should_be_included_prev = should_be_included.copy()
|
||||
should_be_included = set()
|
||||
|
||||
await connection.close()
|
||||
Path("fndb_test.db").unlink()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_set_spent(self):
|
||||
@pytest.mark.parametrize("cache_size", [0, 10, 100000])
|
||||
async def test_set_spent(self, cache_size: uint32):
|
||||
blocks = bt.get_consecutive_blocks(9, [])
|
||||
|
||||
for cache_size in [0, 10, 100000]:
|
||||
db_path = Path("fndb_test.db")
|
||||
if db_path.exists():
|
||||
db_path.unlink()
|
||||
connection = await aiosqlite.connect(db_path)
|
||||
db_wrapper = DBWrapper(connection)
|
||||
coin_store = await CoinStore.create(db_wrapper, cache_size=uint32(cache_size))
|
||||
async with DBConnection() as db_wrapper:
|
||||
coin_store = await CoinStore.create(db_wrapper, cache_size=cache_size)
|
||||
|
||||
# Save/get block
|
||||
for block in blocks:
|
||||
if block.is_transaction_block():
|
||||
removals, additions = [], []
|
||||
await coin_store.new_block(block, additions, removals)
|
||||
removals: List[bytes32] = []
|
||||
additions: List[Coin] = []
|
||||
|
||||
if block.is_transaction_block():
|
||||
assert block.foliage_transaction_block is not None
|
||||
await coin_store.new_block(
|
||||
block.height,
|
||||
block.foliage_transaction_block.timestamp,
|
||||
block.get_included_reward_coins(),
|
||||
additions,
|
||||
removals,
|
||||
)
|
||||
|
||||
coins = block.get_included_reward_coins()
|
||||
records = [await coin_store.get_coin_record(coin.name()) for coin in coins]
|
||||
|
||||
@ -177,37 +204,41 @@ class TestCoinStore:
|
||||
assert record.spent
|
||||
assert record.spent_block_index == block.height
|
||||
|
||||
await connection.close()
|
||||
Path("fndb_test.db").unlink()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_rollback(self):
|
||||
@pytest.mark.parametrize("cache_size", [0, 10, 100000])
|
||||
async def test_rollback(self, cache_size: uint32):
|
||||
blocks = bt.get_consecutive_blocks(20)
|
||||
|
||||
for cache_size in [0, 10, 100000]:
|
||||
db_path = Path("fndb_test.db")
|
||||
if db_path.exists():
|
||||
db_path.unlink()
|
||||
connection = await aiosqlite.connect(db_path)
|
||||
db_wrapper = DBWrapper(connection)
|
||||
async with DBConnection() as db_wrapper:
|
||||
coin_store = await CoinStore.create(db_wrapper, cache_size=uint32(cache_size))
|
||||
|
||||
records: List[Optional[CoinRecord]] = []
|
||||
|
||||
for block in blocks:
|
||||
if block.is_transaction_block():
|
||||
removals, additions = [], []
|
||||
await coin_store.new_block(block, additions, removals)
|
||||
removals: List[bytes32] = []
|
||||
additions: List[Coin] = []
|
||||
|
||||
if block.is_transaction_block():
|
||||
assert block.foliage_transaction_block is not None
|
||||
await coin_store.new_block(
|
||||
block.height,
|
||||
block.foliage_transaction_block.timestamp,
|
||||
block.get_included_reward_coins(),
|
||||
additions,
|
||||
removals,
|
||||
)
|
||||
|
||||
coins = block.get_included_reward_coins()
|
||||
records: List[Optional[CoinRecord]] = [
|
||||
await coin_store.get_coin_record(coin.name()) for coin in coins
|
||||
]
|
||||
records = [await coin_store.get_coin_record(coin.name()) for coin in coins]
|
||||
|
||||
for record in records:
|
||||
assert record is not None
|
||||
await coin_store._set_spent(record.coin.name(), block.height)
|
||||
|
||||
records: List[Optional[CoinRecord]] = [
|
||||
await coin_store.get_coin_record(coin.name()) for coin in coins
|
||||
]
|
||||
records = [await coin_store.get_coin_record(coin.name()) for coin in coins]
|
||||
for record in records:
|
||||
assert record is not None
|
||||
assert record.spent
|
||||
assert record.spent_block_index == block.height
|
||||
|
||||
@ -217,9 +248,7 @@ class TestCoinStore:
|
||||
for block in blocks:
|
||||
if block.is_transaction_block():
|
||||
coins = block.get_included_reward_coins()
|
||||
records: List[Optional[CoinRecord]] = [
|
||||
await coin_store.get_coin_record(coin.name()) for coin in coins
|
||||
]
|
||||
records = [await coin_store.get_coin_record(coin.name()) for coin in coins]
|
||||
|
||||
if block.height <= reorg_index:
|
||||
for record in records:
|
||||
@ -229,36 +258,33 @@ class TestCoinStore:
|
||||
for record in records:
|
||||
assert record is None
|
||||
|
||||
await connection.close()
|
||||
Path("fndb_test.db").unlink()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_basic_reorg(self):
|
||||
for cache_size in [0, 10, 100000]:
|
||||
@pytest.mark.parametrize("cache_size", [0, 10, 100000])
|
||||
async def test_basic_reorg(self, cache_size: uint32):
|
||||
|
||||
async with DBConnection() as db_wrapper:
|
||||
initial_block_count = 30
|
||||
reorg_length = 15
|
||||
blocks = bt.get_consecutive_blocks(initial_block_count)
|
||||
db_path = Path("blockchain_test.db")
|
||||
if db_path.exists():
|
||||
db_path.unlink()
|
||||
connection = await aiosqlite.connect(db_path)
|
||||
db_wrapper = DBWrapper(connection)
|
||||
coin_store = await CoinStore.create(db_wrapper, cache_size=uint32(cache_size))
|
||||
store = await BlockStore.create(db_wrapper)
|
||||
b: Blockchain = await Blockchain.create(coin_store, store, test_constants)
|
||||
try:
|
||||
|
||||
records: List[Optional[CoinRecord]] = []
|
||||
|
||||
for block in blocks:
|
||||
await b.receive_block(block)
|
||||
assert b.get_peak().height == initial_block_count - 1
|
||||
peak = b.get_peak()
|
||||
assert peak is not None
|
||||
assert peak.height == initial_block_count - 1
|
||||
|
||||
for c, block in enumerate(blocks):
|
||||
if block.is_transaction_block():
|
||||
coins = block.get_included_reward_coins()
|
||||
records: List[Optional[CoinRecord]] = [
|
||||
await coin_store.get_coin_record(coin.name()) for coin in coins
|
||||
]
|
||||
records = [await coin_store.get_coin_record(coin.name()) for coin in coins]
|
||||
for record in records:
|
||||
assert record is not None
|
||||
assert not record.spent
|
||||
assert record.confirmed_block_index == block.height
|
||||
assert record.spent_block_index == 0
|
||||
@ -268,7 +294,7 @@ class TestCoinStore:
|
||||
)
|
||||
|
||||
for reorg_block in blocks_reorg_chain:
|
||||
result, error_code, _ = await b.receive_block(reorg_block)
|
||||
result, error_code, _, _ = await b.receive_block(reorg_block)
|
||||
print(f"Height {reorg_block.height} {initial_block_count - 10} result {result}")
|
||||
if reorg_block.height < initial_block_count - 10:
|
||||
assert result == ReceiveBlockResult.ALREADY_HAVE_BLOCK
|
||||
@ -278,28 +304,23 @@ class TestCoinStore:
|
||||
assert result == ReceiveBlockResult.NEW_PEAK
|
||||
if reorg_block.is_transaction_block():
|
||||
coins = reorg_block.get_included_reward_coins()
|
||||
records: List[Optional[CoinRecord]] = [
|
||||
await coin_store.get_coin_record(coin.name()) for coin in coins
|
||||
]
|
||||
records = [await coin_store.get_coin_record(coin.name()) for coin in coins]
|
||||
for record in records:
|
||||
assert record is not None
|
||||
assert not record.spent
|
||||
assert record.confirmed_block_index == reorg_block.height
|
||||
assert record.spent_block_index == 0
|
||||
assert error_code is None
|
||||
assert b.get_peak().height == initial_block_count - 10 + reorg_length - 1
|
||||
except Exception as e:
|
||||
await connection.close()
|
||||
Path("blockchain_test.db").unlink()
|
||||
peak = b.get_peak()
|
||||
assert peak is not None
|
||||
assert peak.height == initial_block_count - 10 + reorg_length - 1
|
||||
finally:
|
||||
b.shut_down()
|
||||
raise e
|
||||
|
||||
await connection.close()
|
||||
Path("blockchain_test.db").unlink()
|
||||
b.shut_down()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_puzzle_hash(self):
|
||||
for cache_size in [0, 10, 100000]:
|
||||
@pytest.mark.parametrize("cache_size", [0, 10, 100000])
|
||||
async def test_get_puzzle_hash(self, cache_size: uint32):
|
||||
async with DBConnection() as db_wrapper:
|
||||
num_blocks = 20
|
||||
farmer_ph = 32 * b"0"
|
||||
pool_ph = 32 * b"1"
|
||||
@ -309,19 +330,16 @@ class TestCoinStore:
|
||||
pool_reward_puzzle_hash=pool_ph,
|
||||
guarantee_transaction_block=True,
|
||||
)
|
||||
db_path = Path("blockchain_test.db")
|
||||
if db_path.exists():
|
||||
db_path.unlink()
|
||||
connection = await aiosqlite.connect(db_path)
|
||||
db_wrapper = DBWrapper(connection)
|
||||
coin_store = await CoinStore.create(db_wrapper, cache_size=uint32(cache_size))
|
||||
store = await BlockStore.create(db_wrapper)
|
||||
b: Blockchain = await Blockchain.create(coin_store, store, test_constants)
|
||||
for block in blocks:
|
||||
res, err, _ = await b.receive_block(block)
|
||||
res, err, _, _ = await b.receive_block(block)
|
||||
assert err is None
|
||||
assert res == ReceiveBlockResult.NEW_PEAK
|
||||
assert b.get_peak().height == num_blocks - 1
|
||||
peak = b.get_peak()
|
||||
assert peak is not None
|
||||
assert peak.height == num_blocks - 1
|
||||
|
||||
coins_farmer = await coin_store.get_coin_records_by_puzzle_hash(True, pool_ph)
|
||||
coins_pool = await coin_store.get_coin_records_by_puzzle_hash(True, farmer_ph)
|
||||
@ -329,6 +347,4 @@ class TestCoinStore:
|
||||
assert len(coins_farmer) == num_blocks - 2
|
||||
assert len(coins_pool) == num_blocks - 2
|
||||
|
||||
await connection.close()
|
||||
Path("blockchain_test.db").unlink()
|
||||
b.shut_down()
|
||||
|
@ -76,7 +76,7 @@ async def check_spend_bundle_validity(
|
||||
try:
|
||||
connection, blockchain = await create_ram_blockchain(constants)
|
||||
for block in blocks:
|
||||
received_block_result, err, fork_height = await blockchain.receive_block(block)
|
||||
received_block_result, err, fork_height, coin_changes = await blockchain.receive_block(block)
|
||||
assert err is None
|
||||
|
||||
additional_blocks = bt.get_consecutive_blocks(
|
||||
@ -87,7 +87,7 @@ async def check_spend_bundle_validity(
|
||||
)
|
||||
newest_block = additional_blocks[-1]
|
||||
|
||||
received_block_result, err, fork_height = await blockchain.receive_block(newest_block)
|
||||
received_block_result, err, fork_height, coin_changes = await blockchain.receive_block(newest_block)
|
||||
|
||||
if fork_height:
|
||||
coins_added = await blockchain.coin_store.get_coins_added_at_height(uint32(fork_height + 1))
|
||||
|
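The repeated change in this hunk, unpacking four values from receive_block instead of three, suggests the method now also returns the coin changes produced by the block; the tests either bind the extra element as coin_changes or discard it. A stub-only sketch of the new call shape (types assumed for illustration, not taken from the real Blockchain API):

import asyncio
from typing import Any, List, Optional, Tuple

async def receive_block_stub(block: Any) -> Tuple[str, Optional[str], Optional[int], List[Any]]:
    # stand-in returning (result, error, fork_height, coin_changes)
    return "NEW_PEAK", None, None, []

async def demo() -> None:
    result, error, fork_height, _ = await receive_block_stub(object())
    assert result == "NEW_PEAK" and error is None

asyncio.run(demo())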
@ -1385,7 +1385,7 @@ class TestFullNodeProtocol:
|
||||
invalid_program = SerializedProgram.from_bytes(large_puzzle_reveal)
|
||||
invalid_block = dataclasses.replace(invalid_block, transactions_generator=invalid_program)
|
||||
|
||||
result, error, fork_h = await full_node_1.full_node.blockchain.receive_block(invalid_block)
|
||||
result, error, fork_h, _ = await full_node_1.full_node.blockchain.receive_block(invalid_block)
|
||||
assert error is not None
|
||||
assert error == Err.PRE_SOFT_FORK_MAX_GENERATOR_SIZE
|
||||
|
||||
|
@ -233,7 +233,7 @@ class TestFullNodeStore:
|
||||
normalized_to_identity_cc_sp=normalized_to_identity,
|
||||
)
|
||||
for block in blocks_reorg:
|
||||
res, _, fork_height = await blockchain.receive_block(block)
|
||||
res, _, fork_height, _ = await blockchain.receive_block(block)
|
||||
|
||||
if res == ReceiveBlockResult.NEW_PEAK:
|
||||
if fork_height is not None and fork_height != block.height - 1:
|
||||
@ -286,7 +286,7 @@ class TestFullNodeStore:
|
||||
normalized_to_identity_cc_ip=normalized_to_identity,
|
||||
normalized_to_identity_cc_sp=normalized_to_identity,
|
||||
)
|
||||
res, _, fork_height = await blockchain.receive_block(blocks[-1])
|
||||
res, _, fork_height, _ = await blockchain.receive_block(blocks[-1])
|
||||
if res == ReceiveBlockResult.NEW_PEAK:
|
||||
if fork_height is not None and fork_height != blocks[-1].height - 1:
|
||||
fork_block = blockchain.block_record(blockchain.height_to_hash(fork_height))
|
||||
@ -736,7 +736,7 @@ class TestFullNodeStore:
|
||||
for block in blocks:
|
||||
for sub_slot in block.finished_sub_slots:
|
||||
assert store.new_finished_sub_slot(sub_slot, blockchain, peak, peak_full_block) is not None
|
||||
res, err, _ = await blockchain.receive_block(block)
|
||||
res, err, _, _ = await blockchain.receive_block(block)
|
||||
assert res == ReceiveBlockResult.NEW_PEAK
|
||||
peak = blockchain.get_peak()
|
||||
peak_full_block = await blockchain.get_full_peak()
|
||||
|
@ -4,8 +4,6 @@ import logging
|
||||
from typing import Dict, List, Optional, Tuple, Callable
|
||||
|
||||
import pytest
|
||||
from clvm import SExp
|
||||
from clvm.EvalError import EvalError
|
||||
|
||||
import chia.server.ws_connection as ws
|
||||
|
||||
@ -22,12 +20,12 @@ from chia.types.spend_bundle import SpendBundle
|
||||
from chia.types.mempool_item import MempoolItem
|
||||
from chia.util.clvm import int_to_bytes
|
||||
from chia.util.condition_tools import conditions_for_solution
|
||||
from chia.util.errors import Err, ValidationError
|
||||
from chia.util.errors import Err
|
||||
from chia.util.ints import uint64
|
||||
from chia.util.hash import std_hash
|
||||
from chia.types.mempool_inclusion_status import MempoolInclusionStatus
|
||||
from chia.util.api_decorators import api_request, peer_required, bytes_required
|
||||
from chia.full_node.mempool_check_conditions import parse_condition_args, parse_condition, get_name_puzzle_conditions
|
||||
from chia.full_node.mempool_check_conditions import get_name_puzzle_conditions
|
||||
from chia.full_node.pending_tx_cache import PendingTxCache
|
||||
from blspy import G2Element
|
||||
|
||||
@ -36,7 +34,6 @@ from tests.core.node_height import node_height_at_least
|
||||
from tests.setup_nodes import bt, setup_simulators_and_wallets
|
||||
from tests.time_out_assert import time_out_assert
|
||||
from chia.types.blockchain_format.program import Program, INFINITE_COST
|
||||
from chia.consensus.condition_costs import ConditionCost
|
||||
from chia.consensus.cost_calculator import NPCResult
|
||||
from chia.types.blockchain_format.program import SerializedProgram
|
||||
from clvm_tools import binutils
|
||||
@ -1641,400 +1638,6 @@ class TestMempoolManager:
|
||||
assert err == Err.ASSERT_MY_AMOUNT_FAILED
|
||||
|
||||
|
||||
class TestConditionParser:
|
||||
@pytest.mark.parametrize("safe_mode", [True, False])
|
||||
def test_parse_condition_agg_sig(self, safe_mode: bool):
|
||||
|
||||
valid_pubkey = b"b" * 48
|
||||
short_pubkey = b"b" * 47
|
||||
long_pubkey = b"b" * 49
|
||||
|
||||
valid_message = b"a" * 1024
|
||||
long_message = b"a" * 1025
|
||||
empty_message = b""
|
||||
|
||||
for condition_code in [ConditionOpcode.AGG_SIG_UNSAFE, ConditionOpcode.AGG_SIG_ME]:
|
||||
cost, args = parse_condition_args(SExp.to([valid_pubkey, valid_message]), condition_code, safe_mode)
|
||||
assert cost == ConditionCost.AGG_SIG.value
|
||||
assert args == [valid_pubkey, valid_message]
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
cost, args = parse_condition_args(SExp.to([valid_pubkey, long_message]), condition_code, safe_mode)
|
||||
|
||||
# empty messages are allowed
|
||||
cost, args = parse_condition_args(SExp.to([valid_pubkey, empty_message]), condition_code, safe_mode)
|
||||
assert cost == ConditionCost.AGG_SIG.value
|
||||
assert args == [valid_pubkey, empty_message]
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
cost, args = parse_condition_args(SExp.to([short_pubkey, valid_message]), condition_code, safe_mode)
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
cost, args = parse_condition_args(SExp.to([long_pubkey, valid_message]), condition_code, safe_mode)
|
||||
|
||||
# missing message argument
|
||||
with pytest.raises(EvalError):
|
||||
cost, args = parse_condition_args(SExp.to([valid_pubkey]), condition_code, safe_mode)
|
||||
|
||||
# missing all arguments
|
||||
with pytest.raises(EvalError):
|
||||
cost, args = parse_condition_args(SExp.to([]), condition_code, safe_mode)
|
||||
|
||||
# garbage at the end of the arguments list is not allowed
|
||||
with pytest.raises(ValidationError):
|
||||
cost, args = parse_condition_args(
|
||||
SExp.to([valid_pubkey, valid_message, b"garbage"]), condition_code, safe_mode
|
||||
)
|
||||
|
||||
# note how this is a list that isn't terminated with a NULL
|
||||
# we still treat this as a list of two items, ignoring the garbage
|
||||
cost, args = parse_condition_args(
|
||||
SExp.to((valid_pubkey, (valid_message, b"garbage"))), condition_code, safe_mode
|
||||
)
|
||||
assert cost == ConditionCost.AGG_SIG.value
|
||||
assert args == [valid_pubkey, valid_message]
|
||||
|
||||
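The final case above feeds parse_condition_args a dotted pair chain instead of a NIL-terminated list. A small sketch of the difference, assuming the clvm package's SExp API that this test file already imports:

from clvm import SExp

proper = SExp.to([b"pk", b"msg"])            # (pk . (msg . ()))
improper = SExp.to((b"pk", (b"msg", b"x")))  # (pk . (msg . x)), no NIL terminator

# both expose the two leading arguments the parser looks at
assert proper.first().as_atom() == b"pk"
assert improper.rest().first().as_atom() == b"msg"

# only the proper list ends in NIL; the improper one ends in leftover garbage
assert proper.rest().rest().nullp()
assert not improper.rest().rest().nullp()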
@pytest.mark.parametrize("safe_mode", [True, False])
|
||||
def test_parse_condition_create_coin(self, safe_mode: bool):
|
||||
|
||||
valid_hash = b"b" * 32
|
||||
short_hash = b"b" * 31
|
||||
long_hash = b"b" * 33
|
||||
|
||||
valid_amount = int_to_bytes(1000000000)
|
||||
# this is greater than max coin amount
|
||||
large_amount = int_to_bytes(2 ** 64)
|
||||
leading_zeros_amount = bytes([0] * 100) + int_to_bytes(1000000000)
|
||||
negative_amount = int_to_bytes(-1000)
|
||||
# this is still -1, but just with a lot of (redundant) 0xff bytes
|
||||
# prepended
|
||||
large_negative_amount = bytes([0xFF] * 100) + int_to_bytes(-1)
|
||||
|
||||
cost, args = parse_condition_args(SExp.to([valid_hash, valid_amount]), ConditionOpcode.CREATE_COIN, safe_mode)
|
||||
assert cost == ConditionCost.CREATE_COIN.value
|
||||
assert args == [valid_hash, valid_amount]
|
||||
|
||||
if safe_mode:
|
||||
# we don't allow over-long encoding in safe-mode
|
||||
with pytest.raises(ValidationError):
|
||||
parse_condition_args(
|
||||
SExp.to([valid_hash, leading_zeros_amount]), ConditionOpcode.CREATE_COIN, safe_mode
|
||||
)
|
||||
else:
|
||||
cost, args = parse_condition_args(
|
||||
SExp.to([valid_hash, leading_zeros_amount]), ConditionOpcode.CREATE_COIN, safe_mode
|
||||
)
|
||||
assert cost == ConditionCost.CREATE_COIN.value
|
||||
# the amount will have its leading zeros stripped
|
||||
assert args == [valid_hash, valid_amount]
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
cost, args = parse_condition_args(
|
||||
SExp.to([valid_hash, large_amount]), ConditionOpcode.CREATE_COIN, safe_mode
|
||||
)
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
cost, args = parse_condition_args(
|
||||
SExp.to([short_hash, valid_amount]), ConditionOpcode.CREATE_COIN, safe_mode
|
||||
)
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
cost, args = parse_condition_args(
|
||||
SExp.to([long_hash, valid_amount]), ConditionOpcode.CREATE_COIN, safe_mode
|
||||
)
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
cost, args = parse_condition_args(
|
||||
SExp.to([valid_hash, negative_amount]), ConditionOpcode.CREATE_COIN, safe_mode
|
||||
)
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
cost, args = parse_condition_args(
|
||||
SExp.to([valid_hash, large_negative_amount]), ConditionOpcode.CREATE_COIN, safe_mode
|
||||
)
|
||||
|
||||
# missing amount
|
||||
with pytest.raises(EvalError):
|
||||
cost, args = parse_condition_args(SExp.to([valid_hash]), ConditionOpcode.CREATE_COIN, safe_mode)
|
||||
|
||||
# missing everything
|
||||
with pytest.raises(EvalError):
|
||||
cost, args = parse_condition_args(SExp.to([]), ConditionOpcode.CREATE_COIN, safe_mode)
|
||||
|
||||
# garbage at the end of the arguments list is allowed but stripped
|
||||
cost, args = parse_condition_args(
|
||||
SExp.to([valid_hash, valid_amount, b"garbage"]), ConditionOpcode.CREATE_COIN, safe_mode
|
||||
)
|
||||
assert cost == ConditionCost.CREATE_COIN.value
|
||||
assert args == [valid_hash, valid_amount]
|
||||
|
||||
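The leading-zeros cases above hinge on the minimal signed big-endian encoding CLVM uses for integers: prefix bytes of 0x00 (or 0xff for negatives) add no information, so safe mode rejects the over-long form while non-safe mode strips it back to the canonical bytes. A standalone sketch with hypothetical helpers (not chia.util.clvm.int_to_bytes), showing only the encoding side; negative amounts are still rejected on validation grounds regardless of encoding:

def canonical_int_to_bytes(v: int) -> bytes:
    # shortest signed big-endian form; zero encodes as the empty atom
    if v == 0:
        return b""
    return v.to_bytes((v.bit_length() + 8) // 8, "big", signed=True)

def strip_redundant_prefix(blob: bytes) -> bytes:
    # drop 0x00 bytes before a clear sign bit, or 0xff bytes before a set one
    while len(blob) > 1 and (
        (blob[0] == 0x00 and blob[1] < 0x80) or (blob[0] == 0xFF and blob[1] >= 0x80)
    ):
        blob = blob[1:]
    return blob

amount = canonical_int_to_bytes(1000000000)                    # b"\x3b\x9a\xca\x00"
assert strip_redundant_prefix(bytes(100) + amount) == amount   # leading zeros stripped
minus_one = (-1).to_bytes(1, "big", signed=True)               # b"\xff"
assert strip_redundant_prefix(bytes([0xFF] * 100) + minus_one) == minus_one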
@pytest.mark.parametrize("safe_mode", [True, False])
|
||||
def test_parse_condition_seconds(self, safe_mode: bool):
|
||||
|
||||
valid_timestamp = int_to_bytes(100)
|
||||
leading_zeros_timestamp = bytes([0] * 100) + int_to_bytes(100)
|
||||
negative_timestamp = int_to_bytes(-100)
|
||||
large_negative_timestamp = bytes([0xFF] * 100) + int_to_bytes(-1)
|
||||
|
||||
for condition_code in [ConditionOpcode.ASSERT_SECONDS_ABSOLUTE, ConditionOpcode.ASSERT_SECONDS_RELATIVE]:
|
||||
cost, args = parse_condition_args(SExp.to([valid_timestamp]), condition_code, safe_mode)
|
||||
assert cost == 0
|
||||
assert args == [valid_timestamp]
|
||||
|
||||
if safe_mode:
|
||||
# we don't allow over-long encodings in safe mode
|
||||
with pytest.raises(ValidationError):
|
||||
parse_condition_args(SExp.to([leading_zeros_timestamp]), condition_code, safe_mode)
|
||||
else:
|
||||
cost, args = parse_condition_args(SExp.to([leading_zeros_timestamp]), condition_code, safe_mode)
|
||||
assert cost == 0
|
||||
assert args == [valid_timestamp]
|
||||
|
||||
# a condition with a negative timestamp is always true
|
||||
cost, args = parse_condition_args(SExp.to([negative_timestamp]), condition_code, safe_mode)
|
||||
assert cost == 0
|
||||
assert args is None
|
||||
|
||||
cost, args = parse_condition_args(SExp.to([large_negative_timestamp]), condition_code, safe_mode)
|
||||
assert cost == 0
|
||||
assert args is None
|
||||
|
||||
# garbage at the end of the arguments list is allowed but stripped
|
||||
cost, args = parse_condition_args(SExp.to([valid_timestamp, b"garbage"]), condition_code, safe_mode)
|
||||
assert cost == 0
|
||||
assert args == [valid_timestamp]
|
||||
|
||||
# missing argument
|
||||
with pytest.raises(EvalError):
|
||||
cost, args = parse_condition_args(SExp.to([]), condition_code, safe_mode)
|
||||
|
||||
@pytest.mark.parametrize("safe_mode", [True, False])
|
||||
def test_parse_condition_height(self, safe_mode: bool):
|
||||
|
||||
valid_height = int_to_bytes(100)
|
||||
leading_zeros_height = bytes([0] * 100) + int_to_bytes(100)
|
||||
negative_height = int_to_bytes(-100)
|
||||
large_negative_height = bytes([0xFF] * 100) + int_to_bytes(-1)
|
||||
|
||||
for condition_code in [ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE, ConditionOpcode.ASSERT_HEIGHT_RELATIVE]:
|
||||
cost, args = parse_condition_args(SExp.to([valid_height]), condition_code, safe_mode)
|
||||
assert cost == 0
|
||||
assert args == [valid_height]
|
||||
|
||||
if safe_mode:
|
||||
with pytest.raises(ValidationError):
|
||||
parse_condition_args(SExp.to([leading_zeros_height]), condition_code, safe_mode)
|
||||
else:
|
||||
cost, args = parse_condition_args(SExp.to([leading_zeros_height]), condition_code, safe_mode)
|
||||
assert cost == 0
|
||||
assert args == [valid_height]
|
||||
|
||||
# a condition with a negative height is always true
|
||||
cost, args = parse_condition_args(SExp.to([negative_height]), condition_code, safe_mode)
|
||||
assert cost == 0
|
||||
assert args is None
|
||||
|
||||
cost, args = parse_condition_args(SExp.to([large_negative_height]), condition_code, safe_mode)
|
||||
assert cost == 0
|
||||
assert args is None
|
||||
|
||||
# garbage at the end of the arguments list is allowed but stripped
|
||||
cost, args = parse_condition_args(SExp.to([valid_height, b"garbage"]), condition_code, safe_mode)
|
||||
assert cost == 0
|
||||
assert args == [valid_height]
|
||||
|
||||
# missing argument
|
||||
with pytest.raises(EvalError):
|
||||
cost, args = parse_condition_args(SExp.to([]), condition_code, safe_mode)
|
||||
|
||||
@pytest.mark.parametrize("safe_mode", [True, False])
|
||||
def test_parse_condition_coin_id(self, safe_mode: bool):
|
||||
|
||||
valid_coin_id = b"a" * 32
|
||||
short_coin_id = b"a" * 31
|
||||
long_coin_id = b"a" * 33
|
||||
|
||||
for condition_code in [ConditionOpcode.ASSERT_MY_COIN_ID, ConditionOpcode.ASSERT_MY_PARENT_ID]:
|
||||
cost, args = parse_condition_args(SExp.to([valid_coin_id]), condition_code, safe_mode)
|
||||
assert cost == 0
|
||||
assert args == [valid_coin_id]
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
cost, args = parse_condition_args(SExp.to([short_coin_id]), condition_code, safe_mode)
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
cost, args = parse_condition_args(SExp.to([long_coin_id]), condition_code, safe_mode)
|
||||
|
||||
# garbage at the end of the arguments list is allowed but stripped
|
||||
cost, args = parse_condition_args(SExp.to([valid_coin_id, b"garbage"]), condition_code, safe_mode)
|
||||
assert cost == 0
|
||||
assert args == [valid_coin_id]
|
||||
|
||||
with pytest.raises(EvalError):
|
||||
cost, args = parse_condition_args(SExp.to([]), condition_code, safe_mode)
|
||||
|
||||
@pytest.mark.parametrize("safe_mode", [True, False])
|
||||
def test_parse_condition_fee(self, safe_mode: bool):
|
||||
|
||||
valid_fee = int_to_bytes(100)
|
||||
leading_zeros_fee = bytes([0] * 100) + int_to_bytes(100)
|
||||
negative_fee = int_to_bytes(-100)
|
||||
large_negative_fee = bytes([0xFF] * 100) + int_to_bytes(-1)
|
||||
large_fee = int_to_bytes(2 ** 64)
|
||||
|
||||
cost, args = parse_condition_args(SExp.to([valid_fee]), ConditionOpcode.RESERVE_FEE, safe_mode)
|
||||
assert cost == 0
|
||||
assert args == [valid_fee]
|
||||
|
||||
if safe_mode:
|
||||
with pytest.raises(ValidationError):
|
||||
parse_condition_args(SExp.to([leading_zeros_fee]), ConditionOpcode.RESERVE_FEE, safe_mode)
|
||||
else:
|
||||
cost, args = parse_condition_args(SExp.to([leading_zeros_fee]), ConditionOpcode.RESERVE_FEE, safe_mode)
|
||||
assert cost == 0
|
||||
assert args == [valid_fee]
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
cost, args = parse_condition_args(SExp.to([negative_fee]), ConditionOpcode.RESERVE_FEE, safe_mode)
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
cost, args = parse_condition_args(SExp.to([large_fee]), ConditionOpcode.RESERVE_FEE, safe_mode)
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
cost, args = parse_condition_args(SExp.to([large_negative_fee]), ConditionOpcode.RESERVE_FEE, safe_mode)
|
||||
|
||||
# garbage at the end of the arguments list is allowed but stripped
|
||||
cost, args = parse_condition_args(SExp.to([valid_fee, b"garbage"]), ConditionOpcode.RESERVE_FEE, safe_mode)
|
||||
assert cost == 0
|
||||
assert args == [valid_fee]
|
||||
|
||||
# missing argument
|
||||
with pytest.raises(EvalError):
|
||||
cost, args = parse_condition_args(SExp.to([]), ConditionOpcode.RESERVE_FEE, safe_mode)
|
||||
|
||||
@pytest.mark.parametrize("safe_mode", [True, False])
|
||||
def test_parse_condition_create_announcement(self, safe_mode: bool):
|
||||
|
||||
valid_msg = b"a" * 1024
|
||||
long_msg = b"a" * 1025
|
||||
empty_msg = b""
|
||||
|
||||
for condition_code in [ConditionOpcode.CREATE_COIN_ANNOUNCEMENT, ConditionOpcode.CREATE_PUZZLE_ANNOUNCEMENT]:
|
||||
cost, args = parse_condition_args(SExp.to([valid_msg]), condition_code, safe_mode)
|
||||
assert cost == 0
|
||||
assert args == [valid_msg]
|
||||
|
||||
cost, args = parse_condition_args(SExp.to([empty_msg]), condition_code, safe_mode)
|
||||
assert cost == 0
|
||||
assert args == [empty_msg]
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
cost, args = parse_condition_args(SExp.to([long_msg]), condition_code, safe_mode)
|
||||
|
||||
# missing argument
|
||||
with pytest.raises(EvalError):
|
||||
cost, args = parse_condition_args(SExp.to([]), condition_code, safe_mode)
|
||||
|
||||
@pytest.mark.parametrize("safe_mode", [True, False])
|
||||
def test_parse_condition_assert_announcement(self, safe_mode: bool):
|
||||
|
||||
valid_hash = b"b" * 32
|
||||
short_hash = b"b" * 31
|
||||
long_hash = b"b" * 33
|
||||
|
||||
for condition_code in [
|
||||
ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT,
|
||||
ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT,
|
||||
ConditionOpcode.ASSERT_MY_PUZZLEHASH,
|
||||
]:
|
||||
cost, args = parse_condition_args(SExp.to([valid_hash]), condition_code, safe_mode)
|
||||
assert cost == 0
|
||||
assert args == [valid_hash]
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
cost, args = parse_condition_args(SExp.to([short_hash]), condition_code, safe_mode)
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
cost, args = parse_condition_args(SExp.to([long_hash]), condition_code, safe_mode)
|
||||
|
||||
# missing argument
|
||||
with pytest.raises(EvalError):
|
||||
cost, args = parse_condition_args(SExp.to([]), condition_code, safe_mode)
|
||||
|
||||
@pytest.mark.parametrize("safe_mode", [True, False])
|
||||
def test_parse_condition_my_amount(self, safe_mode: bool):
|
||||
|
||||
valid_amount = int_to_bytes(100)
|
||||
leading_zeros_amount = bytes([0] * 100) + int_to_bytes(100)
|
||||
negative_amount = int_to_bytes(-100)
|
||||
large_negative_amount = bytes([0xFF] * 100) + int_to_bytes(-1)
|
||||
large_amount = int_to_bytes(2 ** 64)
|
||||
|
||||
cost, args = parse_condition_args(SExp.to([valid_amount]), ConditionOpcode.ASSERT_MY_AMOUNT, safe_mode)
|
||||
assert cost == 0
|
||||
assert args == [valid_amount]
|
||||
|
||||
if safe_mode:
|
||||
with pytest.raises(ValidationError):
|
||||
parse_condition_args(SExp.to([leading_zeros_amount]), ConditionOpcode.ASSERT_MY_AMOUNT, safe_mode)
|
||||
else:
|
||||
cost, args = parse_condition_args(
|
||||
SExp.to([leading_zeros_amount]), ConditionOpcode.ASSERT_MY_AMOUNT, safe_mode
|
||||
)
|
||||
assert cost == 0
|
||||
assert args == [valid_amount]
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
cost, args = parse_condition_args(SExp.to([negative_amount]), ConditionOpcode.ASSERT_MY_AMOUNT, safe_mode)
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
cost, args = parse_condition_args(SExp.to([large_amount]), ConditionOpcode.ASSERT_MY_AMOUNT, safe_mode)
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
cost, args = parse_condition_args(
|
||||
SExp.to([large_negative_amount]), ConditionOpcode.ASSERT_MY_AMOUNT, safe_mode
|
||||
)
|
||||
|
||||
# garbage at the end of the arguments list is allowed but stripped
|
||||
cost, args = parse_condition_args(
|
||||
SExp.to([valid_amount, b"garbage"]), ConditionOpcode.ASSERT_MY_AMOUNT, safe_mode
|
||||
)
|
||||
assert cost == 0
|
||||
assert args == [valid_amount]
|
||||
|
||||
# missing argument
|
||||
with pytest.raises(EvalError):
|
||||
cost, args = parse_condition_args(SExp.to([]), ConditionOpcode.ASSERT_MY_AMOUNT, safe_mode)
|
||||
|
||||
def test_parse_unknown_condition(self):
|
||||
|
||||
for opcode in [129, 0, 1, 1000, 74]:
|
||||
with pytest.raises(ValidationError):
|
||||
cost, args = parse_condition_args(SExp.to([b"test"]), opcode, False)
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
cost, args = parse_condition_args(SExp.to([b"foo", b"bar"]), opcode, False)
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
cost, args = parse_condition_args(SExp.to([]), opcode, False)
|
||||
|
||||
def test_parse_condition(self):
|
||||
|
||||
for opcode in [129, 0, 1, 1000, 74]:
|
||||
with pytest.raises(ValidationError):
|
||||
cost, args = parse_condition(SExp.to([int_to_bytes(opcode), b"test"]), safe_mode=True)
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
cost, args = parse_condition(SExp.to([int_to_bytes(opcode), b"foo", b"bar"]), safe_mode=True)
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
cost, args = parse_condition(SExp.to([int_to_bytes(opcode)]), safe_mode=True)
|
||||
|
||||
assert (0, None) == parse_condition(SExp.to([int_to_bytes(opcode), b"test"]), safe_mode=False)
|
||||
assert (0, None) == parse_condition(SExp.to([int_to_bytes(opcode), b"foo", b"bar"]), safe_mode=False)
|
||||
assert (0, None) == parse_condition(SExp.to([int_to_bytes(opcode)]), safe_mode=False)
|
||||
|
||||
|
||||
# the following tests generate generator programs and run them through get_name_puzzle_conditions()
|
||||
|
||||
COST_PER_BYTE = 12000
|
||||
@ -2044,7 +1647,6 @@ MAX_BLOCK_COST_CLVM = 11000000000
|
||||
def generator_condition_tester(
|
||||
conditions: str,
|
||||
*,
|
||||
rust_checker: bool,
|
||||
safe_mode: bool = False,
|
||||
quote: bool = True,
|
||||
max_cost: int = MAX_BLOCK_COST_CLVM,
|
||||
@ -2055,19 +1657,18 @@ def generator_condition_tester(
|
||||
generator = BlockGenerator(program, [])
|
||||
print(f"len: {len(bytes(program))}")
|
||||
npc_result: NPCResult = get_name_puzzle_conditions(
|
||||
generator, max_cost, cost_per_byte=COST_PER_BYTE, safe_mode=safe_mode, rust_checker=rust_checker
|
||||
generator, max_cost, cost_per_byte=COST_PER_BYTE, safe_mode=safe_mode
|
||||
)
|
||||
return npc_result
|
||||
|
||||
|
||||
class TestGeneratorConditions:
|
||||
@pytest.mark.parametrize("rust_checker", [True, False])
|
||||
def test_invalid_condition_args_terminator(self, rust_checker: bool):
|
||||
def test_invalid_condition_args_terminator(self):
|
||||
|
||||
# note how the condition argument list isn't correctly terminated with a
|
||||
# NIL atom. This is allowed, and all arguments beyond the ones we look
|
||||
# at are ignored, including the termination of the list
|
||||
npc_result = generator_condition_tester("(80 50 . 1)", rust_checker=rust_checker)
|
||||
npc_result = generator_condition_tester("(80 50 . 1)")
|
||||
assert npc_result.error is None
|
||||
assert len(npc_result.npc_list) == 1
|
||||
opcode = ConditionOpcode(bytes([80]))
|
||||
@ -2077,16 +1678,14 @@ class TestGeneratorConditions:
|
||||
c = npc_result.npc_list[0].conditions[0][1][0]
|
||||
assert c == ConditionWithArgs(opcode=ConditionOpcode.ASSERT_SECONDS_RELATIVE, vars=[bytes([50])])
|
||||
|
||||
@pytest.mark.parametrize("rust_checker", [True, False])
|
||||
def test_invalid_condition_list_terminator(self, rust_checker: bool):
|
||||
def test_invalid_condition_list_terminator(self):
|
||||
|
||||
# note how the list of conditions isn't correctly terminated with a
|
||||
# NIL atom. This is a failure
|
||||
npc_result = generator_condition_tester("(80 50) . 3", rust_checker=rust_checker)
|
||||
npc_result = generator_condition_tester("(80 50) . 3")
|
||||
assert npc_result.error in [Err.INVALID_CONDITION.value, Err.GENERATOR_RUNTIME_ERROR.value]
|
||||
|
||||
@pytest.mark.parametrize("rust_checker", [True, False])
|
||||
def test_duplicate_height_time_conditions(self, rust_checker: bool):
|
||||
def test_duplicate_height_time_conditions(self):
|
||||
# ASSERT_SECONDS_RELATIVE
|
||||
# ASSERT_SECONDS_ABSOLUTE
|
||||
# ASSERT_HEIGHT_RELATIVE
|
||||
@ -2094,9 +1693,7 @@ class TestGeneratorConditions:
|
||||
for cond in [80, 81, 82, 83]:
|
||||
# even though the generator outputs multiple conditions, we only
|
||||
# need to return the highest one (i.e. most strict)
|
||||
npc_result = generator_condition_tester(
|
||||
" ".join([f"({cond} {i})" for i in range(50, 101)]), rust_checker=rust_checker
|
||||
)
|
||||
npc_result = generator_condition_tester(" ".join([f"({cond} {i})" for i in range(50, 101)]))
|
||||
assert npc_result.error is None
|
||||
assert len(npc_result.npc_list) == 1
|
||||
opcode = ConditionOpcode(bytes([cond]))
|
||||
@ -2107,29 +1704,21 @@ class TestGeneratorConditions:
|
||||
max_arg = max(max_arg, int_from_bytes(c.vars[0]))
|
||||
assert max_arg == 100
|
||||
|
||||
@pytest.mark.parametrize("rust_checker", [True, False])
|
||||
def test_just_announcement(self, rust_checker: bool):
|
||||
def test_just_announcement(self):
|
||||
# CREATE_COIN_ANNOUNCEMENT
|
||||
# CREATE_PUZZLE_ANNOUNCEMENT
|
||||
for cond in [60, 62]:
|
||||
message = "a" * 1024
|
||||
# announcements are validated on the Rust side and never returned
|
||||
# back. They are either satisfied or cause an immediate failure
|
||||
npc_result = generator_condition_tester(f'({cond} "{message}") ' * 50, rust_checker=rust_checker)
|
||||
npc_result = generator_condition_tester(f'({cond} "{message}") ' * 50)
|
||||
assert npc_result.error is None
|
||||
assert len(npc_result.npc_list) == 1
|
||||
if rust_checker:
|
||||
# create-announcements and assert-announcements are dropped once
|
||||
# validated
|
||||
assert npc_result.npc_list[0].conditions == []
|
||||
else:
|
||||
assert len(npc_result.npc_list[0].conditions) == 1
|
||||
print(npc_result.npc_list[0].conditions[0][0])
|
||||
assert npc_result.npc_list[0].conditions[0][0] == ConditionOpcode(bytes([cond]))
|
||||
assert len(npc_result.npc_list[0].conditions[0][1]) == 50
|
||||
# create-announcements and assert-announcements are dropped once
|
||||
# validated
|
||||
assert npc_result.npc_list[0].conditions == []
|
||||
|
||||
@pytest.mark.parametrize("rust_checker", [True, False])
|
||||
def test_assert_announcement_fail(self, rust_checker: bool):
|
||||
def test_assert_announcement_fail(self):
|
||||
# ASSERT_COIN_ANNOUNCEMENT
|
||||
# ASSERT_PUZZLE_ANNOUNCEMENT
|
||||
for cond in [61, 63]:
|
||||
@ -2138,17 +1727,16 @@ class TestGeneratorConditions:
|
||||
# back. They are either satisfied or cause an immediate failure
|
||||
# in this test we just assert announcements, we never make them, so
|
||||
# these should fail
|
||||
npc_result = generator_condition_tester(f'({cond} "{message}") ', rust_checker=rust_checker)
|
||||
npc_result = generator_condition_tester(f'({cond} "{message}") ')
|
||||
assert npc_result.error == Err.ASSERT_ANNOUNCE_CONSUMED_FAILED.value
|
||||
assert npc_result.npc_list == []
|
||||
|
||||
@pytest.mark.parametrize("rust_checker", [True, False])
|
||||
def test_multiple_reserve_fee(self, rust_checker: bool):
|
||||
def test_multiple_reserve_fee(self):
|
||||
# RESERVE_FEE
|
||||
cond = 52
|
||||
# even though the generator outputs 3 conditions, we only need to return one copy
|
||||
# with all the fees accumulated
|
||||
npc_result = generator_condition_tester(f"({cond} 100) " * 3, rust_checker=rust_checker)
|
||||
npc_result = generator_condition_tester(f"({cond} 100) " * 3)
|
||||
assert npc_result.error is None
|
||||
assert len(npc_result.npc_list) == 1
|
||||
opcode = ConditionOpcode(bytes([cond]))
|
||||
@ -2160,31 +1748,25 @@ class TestGeneratorConditions:
|
||||
reserve_fee += int_from_bytes(c.vars[0])
|
||||
|
||||
assert reserve_fee == 300
|
||||
if rust_checker:
|
||||
assert len(npc_result.npc_list[0].conditions[0][1]) == 1
|
||||
assert len(npc_result.npc_list[0].conditions[0][1]) == 1
|
||||
|
||||
@pytest.mark.parametrize("rust_checker", [True, False])
|
||||
def test_duplicate_outputs(self, rust_checker: bool):
|
||||
def test_duplicate_outputs(self):
|
||||
# CREATE_COIN
|
||||
# creating multiple coins with the same properties (same parent, same
|
||||
# target puzzle hash and same amount) is not allowed. That's a consensus
|
||||
# failure.
|
||||
puzzle_hash = "abababababababababababababababab"
|
||||
npc_result = generator_condition_tester(f'(51 "{puzzle_hash}" 10) ' * 2, rust_checker=rust_checker)
|
||||
if rust_checker:
|
||||
assert npc_result.error == Err.DUPLICATE_OUTPUT.value
|
||||
assert npc_result.npc_list == []
|
||||
else:
|
||||
assert npc_result.error is None
|
||||
npc_result = generator_condition_tester(f'(51 "{puzzle_hash}" 10) ' * 2)
|
||||
assert npc_result.error == Err.DUPLICATE_OUTPUT.value
|
||||
assert npc_result.npc_list == []
|
||||
|
||||
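The duplicate-output rejection above follows from how a coin is named: its id is a hash of (parent coin id, puzzle hash, amount), so two CREATE_COINs in the same spend with identical puzzle hash and amount would name the very same coin. A simplified standalone sketch (the real id uses chia's std_hash and CLVM integer encoding for the amount):

from hashlib import sha256

def simplified_coin_id(parent_id: bytes, puzzle_hash: bytes, amount: int) -> bytes:
    return sha256(parent_id + puzzle_hash + amount.to_bytes(8, "big")).digest()

parent, ph = b"\x11" * 32, b"\xab" * 32
assert simplified_coin_id(parent, ph, 10) == simplified_coin_id(parent, ph, 10)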
@pytest.mark.parametrize("rust_checker", [True, False])
|
||||
def test_create_coin_cost(self, rust_checker: bool):
|
||||
def test_create_coin_cost(self):
|
||||
# CREATE_COIN
|
||||
puzzle_hash = "abababababababababababababababab"
|
||||
|
||||
# this max cost is exactly enough for the create coin condition
|
||||
npc_result = generator_condition_tester(
|
||||
f'(51 "{puzzle_hash}" 10) ', max_cost=20470 + 95 * COST_PER_BYTE + 1800000, rust_checker=rust_checker
|
||||
f'(51 "{puzzle_hash}" 10) ', max_cost=20470 + 95 * COST_PER_BYTE + 1800000
|
||||
)
|
||||
assert npc_result.error is None
|
||||
assert npc_result.clvm_cost == 20470
|
||||
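The "exactly enough" budgets in this test and in test_agg_sig_cost below break down into three parts: the measured CLVM execution cost asserted above, the per-byte charge on the serialized generator (presumably 95 and 117 bytes here), and the fixed condition cost. A worked sketch of the arithmetic, where the condition costs are assumed to match chia's ConditionCost values:

COST_PER_BYTE = 12000
CREATE_COIN_COST = 1800000   # assumed ConditionCost.CREATE_COIN
AGG_SIG_COST = 1200000       # assumed ConditionCost.AGG_SIG

create_coin_budget = 20470 + 95 * COST_PER_BYTE + CREATE_COIN_COST
agg_sig_budget = 20512 + 117 * COST_PER_BYTE + AGG_SIG_COST

assert create_coin_budget == 2960470
assert agg_sig_budget == 2624512
# subtracting 1 from either budget pushes the run over max_cost, which is
# exactly what the follow-up assertions with "- 1" check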
@ -2192,18 +1774,17 @@ class TestGeneratorConditions:
|
||||
|
||||
# if we subtract one from max cost, this should fail
|
||||
npc_result = generator_condition_tester(
|
||||
f'(51 "{puzzle_hash}" 10) ', max_cost=20470 + 95 * COST_PER_BYTE + 1800000 - 1, rust_checker=rust_checker
|
||||
f'(51 "{puzzle_hash}" 10) ', max_cost=20470 + 95 * COST_PER_BYTE + 1800000 - 1
|
||||
)
|
||||
assert npc_result.error in [Err.BLOCK_COST_EXCEEDS_MAX.value, Err.INVALID_BLOCK_COST.value]
|
||||
|
||||
@pytest.mark.parametrize("rust_checker", [True, False])
|
||||
def test_agg_sig_cost(self, rust_checker: bool):
|
||||
def test_agg_sig_cost(self):
|
||||
# AGG_SIG_ME
|
||||
pubkey = "abababababababababababababababababababababababab"
|
||||
|
||||
# this max cost is exactly enough for the AGG_SIG condition
|
||||
npc_result = generator_condition_tester(
|
||||
f'(49 "{pubkey}" "foobar") ', max_cost=20512 + 117 * COST_PER_BYTE + 1200000, rust_checker=rust_checker
|
||||
f'(49 "{pubkey}" "foobar") ', max_cost=20512 + 117 * COST_PER_BYTE + 1200000
|
||||
)
|
||||
assert npc_result.error is None
|
||||
assert npc_result.clvm_cost == 20512
|
||||
@ -2211,12 +1792,11 @@ class TestGeneratorConditions:
|
||||
|
||||
# if we subtract one from max cost, this should fail
|
||||
npc_result = generator_condition_tester(
|
||||
f'(49 "{pubkey}" "foobar") ', max_cost=20512 + 117 * COST_PER_BYTE + 1200000 - 1, rust_checker=rust_checker
|
||||
f'(49 "{pubkey}" "foobar") ', max_cost=20512 + 117 * COST_PER_BYTE + 1200000 - 1
|
||||
)
|
||||
assert npc_result.error in [Err.BLOCK_COST_EXCEEDS_MAX.value, Err.INVALID_BLOCK_COST.value]
|
||||
|
||||
@pytest.mark.parametrize("rust_checker", [True, False])
|
||||
def test_create_coin_different_parent(self, rust_checker: bool):
|
||||
def test_create_coin_different_parent(self):
|
||||
|
||||
# if the coins we create have different parents, they are never
|
||||
# considered duplicate, even when they have the same puzzle hash and
|
||||
@ -2229,7 +1809,7 @@ class TestGeneratorConditions:
|
||||
)
|
||||
generator = BlockGenerator(program, [])
|
||||
npc_result: NPCResult = get_name_puzzle_conditions(
|
||||
generator, MAX_BLOCK_COST_CLVM, cost_per_byte=COST_PER_BYTE, safe_mode=False, rust_checker=rust_checker
|
||||
generator, MAX_BLOCK_COST_CLVM, cost_per_byte=COST_PER_BYTE, safe_mode=False
|
||||
)
|
||||
assert npc_result.error is None
|
||||
assert len(npc_result.npc_list) == 2
|
||||
@ -2242,15 +1822,12 @@ class TestGeneratorConditions:
|
||||
)
|
||||
]
|
||||
|
||||
@pytest.mark.parametrize("rust_checker", [True, False])
|
||||
def test_create_coin_different_puzzhash(self, rust_checker: bool):
|
||||
def test_create_coin_different_puzzhash(self):
|
||||
# CREATE_COIN
|
||||
# coins with different puzzle hashes are not considered duplicate
|
||||
puzzle_hash_1 = "abababababababababababababababab"
|
||||
puzzle_hash_2 = "cbcbcbcbcbcbcbcbcbcbcbcbcbcbcbcb"
|
||||
npc_result = generator_condition_tester(
|
||||
f'(51 "{puzzle_hash_1}" 5) (51 "{puzzle_hash_2}" 5)', rust_checker=rust_checker
|
||||
)
|
||||
npc_result = generator_condition_tester(f'(51 "{puzzle_hash_1}" 5) (51 "{puzzle_hash_2}" 5)')
|
||||
assert npc_result.error is None
|
||||
assert len(npc_result.npc_list) == 1
|
||||
opcode = ConditionOpcode.CREATE_COIN
|
||||
@ -2263,14 +1840,11 @@ class TestGeneratorConditions:
|
||||
in npc_result.npc_list[0].conditions[0][1]
|
||||
)
|
||||
|
||||
@pytest.mark.parametrize("rust_checker", [True, False])
|
||||
def test_create_coin_different_amounts(self, rust_checker: bool):
|
||||
def test_create_coin_different_amounts(self):
|
||||
# CREATE_COIN
|
||||
# coins with different amounts are not considered duplicate
|
||||
puzzle_hash = "abababababababababababababababab"
|
||||
npc_result = generator_condition_tester(
|
||||
f'(51 "{puzzle_hash}" 5) (51 "{puzzle_hash}" 4)', rust_checker=rust_checker
|
||||
)
|
||||
npc_result = generator_condition_tester(f'(51 "{puzzle_hash}" 5) (51 "{puzzle_hash}" 4)')
|
||||
assert npc_result.error is None
|
||||
assert len(npc_result.npc_list) == 1
|
||||
opcode = ConditionOpcode.CREATE_COIN
|
||||
@ -2283,11 +1857,10 @@ class TestGeneratorConditions:
|
||||
in npc_result.npc_list[0].conditions[0][1]
|
||||
)
|
||||
|
||||
@pytest.mark.parametrize("rust_checker", [True, False])
|
||||
def test_unknown_condition(self, rust_checker: bool):
|
||||
def test_unknown_condition(self):
|
||||
for sm in [True, False]:
|
||||
for c in ['(1 100 "foo" "bar")', "(100)", "(1 1) (2 2) (3 3)", '("foobar")']:
|
||||
npc_result = generator_condition_tester(c, safe_mode=sm, rust_checker=rust_checker)
|
||||
npc_result = generator_condition_tester(c, safe_mode=sm)
|
||||
print(npc_result)
|
||||
if sm:
|
||||
assert npc_result.error == Err.INVALID_CONDITION.value
|
||||
|
@ -54,8 +54,7 @@ def large_block_generator(size):
|
||||
|
||||
class TestCostCalculation:
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.parametrize("rust_checker", [True, False])
|
||||
async def test_basics(self, rust_checker: bool):
|
||||
async def test_basics(self):
|
||||
wallet_tool = bt.get_pool_wallet_tool()
|
||||
ph = wallet_tool.get_new_puzzlehash()
|
||||
num_blocks = 3
|
||||
@ -81,7 +80,6 @@ class TestCostCalculation:
|
||||
test_constants.MAX_BLOCK_COST_CLVM,
|
||||
cost_per_byte=test_constants.COST_PER_BYTE,
|
||||
safe_mode=False,
|
||||
rust_checker=rust_checker,
|
||||
)
|
||||
|
||||
cost = calculate_cost_of_program(program.program, npc_result, test_constants.COST_PER_BYTE)
|
||||
@ -107,8 +105,7 @@ class TestCostCalculation:
|
||||
)
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.parametrize("rust_checker", [True, False])
|
||||
async def test_strict_mode(self, rust_checker: bool):
|
||||
async def test_strict_mode(self):
|
||||
wallet_tool = bt.get_pool_wallet_tool()
|
||||
ph = wallet_tool.get_new_puzzlehash()
|
||||
|
||||
@ -147,7 +144,6 @@ class TestCostCalculation:
|
||||
test_constants.MAX_BLOCK_COST_CLVM,
|
||||
cost_per_byte=test_constants.COST_PER_BYTE,
|
||||
safe_mode=True,
|
||||
rust_checker=rust_checker,
|
||||
)
|
||||
assert npc_result.error is not None
|
||||
npc_result = get_name_puzzle_conditions(
|
||||
@ -155,7 +151,6 @@ class TestCostCalculation:
|
||||
test_constants.MAX_BLOCK_COST_CLVM,
|
||||
cost_per_byte=test_constants.COST_PER_BYTE,
|
||||
safe_mode=False,
|
||||
rust_checker=rust_checker,
|
||||
)
|
||||
assert npc_result.error is None
|
||||
|
||||
@ -166,8 +161,7 @@ class TestCostCalculation:
|
||||
assert error is None
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.parametrize("rust_checker", [True, False])
|
||||
async def test_clvm_strict_mode(self, rust_checker: bool):
|
||||
async def test_clvm_strict_mode(self):
|
||||
block = Program.from_bytes(bytes(SMALL_BLOCK_GENERATOR.program))
|
||||
disassembly = binutils.disassemble(block)
|
||||
# this is a valid generator program except the first clvm
|
||||
@ -181,7 +175,6 @@ class TestCostCalculation:
|
||||
test_constants.MAX_BLOCK_COST_CLVM,
|
||||
cost_per_byte=test_constants.COST_PER_BYTE,
|
||||
safe_mode=True,
|
||||
rust_checker=rust_checker,
|
||||
)
|
||||
assert npc_result.error is not None
|
||||
npc_result = get_name_puzzle_conditions(
|
||||
@ -189,13 +182,11 @@ class TestCostCalculation:
|
||||
test_constants.MAX_BLOCK_COST_CLVM,
|
||||
cost_per_byte=test_constants.COST_PER_BYTE,
|
||||
safe_mode=False,
|
||||
rust_checker=rust_checker,
|
||||
)
|
||||
assert npc_result.error is None
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.parametrize("rust_checker", [True, False])
|
||||
async def test_tx_generator_speed(self, rust_checker: bool):
|
||||
async def test_tx_generator_speed(self):
|
||||
LARGE_BLOCK_COIN_CONSUMED_COUNT = 687
|
||||
generator_bytes = large_block_generator(LARGE_BLOCK_COIN_CONSUMED_COUNT)
|
||||
program = SerializedProgram.from_bytes(generator_bytes)
|
||||
@ -207,7 +198,6 @@ class TestCostCalculation:
|
||||
test_constants.MAX_BLOCK_COST_CLVM,
|
||||
cost_per_byte=test_constants.COST_PER_BYTE,
|
||||
safe_mode=False,
|
||||
rust_checker=rust_checker,
|
||||
)
|
||||
end_time = time.time()
|
||||
duration = end_time - start_time
|
||||
@ -218,8 +208,7 @@ class TestCostCalculation:
|
||||
assert duration < 1
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.parametrize("rust_checker", [True, False])
|
||||
async def test_clvm_max_cost(self, rust_checker: bool):
|
||||
async def test_clvm_max_cost(self):
|
||||
|
||||
block = Program.from_bytes(bytes(SMALL_BLOCK_GENERATOR.program))
|
||||
disassembly = binutils.disassemble(block)
|
||||
@ -234,18 +223,14 @@ class TestCostCalculation:
|
||||
|
||||
# ensure we fail if the program exceeds the cost
|
||||
generator = BlockGenerator(program, [])
|
||||
npc_result: NPCResult = get_name_puzzle_conditions(
|
||||
generator, 10000000, cost_per_byte=0, safe_mode=False, rust_checker=rust_checker
|
||||
)
|
||||
npc_result: NPCResult = get_name_puzzle_conditions(generator, 10000000, cost_per_byte=0, safe_mode=False)
|
||||
|
||||
assert npc_result.error is not None
|
||||
assert npc_result.clvm_cost == 0
|
||||
|
||||
# raise the max cost to make sure this passes
|
||||
# ensure we pass if the program does not exceed the cost
|
||||
npc_result = get_name_puzzle_conditions(
|
||||
generator, 20000000, cost_per_byte=0, safe_mode=False, rust_checker=rust_checker
|
||||
)
|
||||
npc_result = get_name_puzzle_conditions(generator, 20000000, cost_per_byte=0, safe_mode=False)
|
||||
|
||||
assert npc_result.error is None
|
||||
assert npc_result.clvm_cost > 10000000
|
||||
|
@ -8,6 +8,7 @@ from chia.util.config import create_default_chia_config, initial_config_file, lo
|
||||
from chia.util.path import mkdir
|
||||
from multiprocessing import Pool
|
||||
from pathlib import Path
|
||||
from threading import Thread
|
||||
from time import sleep
|
||||
from typing import Dict
|
||||
|
||||
@ -52,10 +53,14 @@ async def create_reader_and_writer_tasks(root_path: Path, default_config: Dict):
|
||||
"""
|
||||
Spin off reader and writer threads and wait for completion
|
||||
"""
|
||||
await asyncio.gather(
|
||||
asyncio.to_thread(write_config, root_path, default_config),
|
||||
asyncio.to_thread(read_and_compare_config, root_path, default_config),
|
||||
)
|
||||
thread1 = Thread(target=write_config, kwargs={"root_path": root_path, "config": default_config})
|
||||
thread2 = Thread(target=read_and_compare_config, kwargs={"root_path": root_path, "default_config": default_config})
|
||||
|
||||
thread1.start()
|
||||
thread2.start()
|
||||
|
||||
thread1.join()
|
||||
thread2.join()
|
||||
|
||||
|
||||
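The hunk above swaps asyncio.to_thread for plain threading.Thread objects, presumably because asyncio.to_thread only exists on Python 3.9+ while the tests still run on older interpreters. For comparison, a 3.8-compatible async form of the same idea (hypothetical helper, not part of the test):

import asyncio
from concurrent.futures import ThreadPoolExecutor
from typing import Callable

async def gather_in_threads(*funcs: Callable[[], None]) -> None:
    # run each blocking function in a worker thread and await them together
    loop = asyncio.get_running_loop()
    with ThreadPoolExecutor(max_workers=len(funcs)) as pool:
        await asyncio.gather(*(loop.run_in_executor(pool, fn) for fn in funcs))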
def run_reader_and_writer_tasks(root_path: Path, default_config: Dict):
|
||||
|
@ -1,4 +1,3 @@
|
||||
import pytest
|
||||
from clvm_tools import binutils
|
||||
from clvm_tools.clvmc import compile_clvm_text
|
||||
|
||||
@ -94,17 +93,14 @@ class TestROM:
|
||||
assert cost == EXPECTED_ABBREVIATED_COST
|
||||
assert r.as_bin().hex() == EXPECTED_OUTPUT
|
||||
|
||||
@pytest.mark.parametrize("rust_checker", [True, False])
|
||||
def test_get_name_puzzle_conditions(self, rust_checker: bool):
|
||||
def test_get_name_puzzle_conditions(self):
|
||||
# this tests that extra block or coin data doesn't confuse `get_name_puzzle_conditions`
|
||||
|
||||
gen = block_generator()
|
||||
cost, r = run_generator(gen, max_cost=MAX_COST)
|
||||
print(r)
|
||||
|
||||
npc_result = get_name_puzzle_conditions(
|
||||
gen, max_cost=MAX_COST, cost_per_byte=COST_PER_BYTE, safe_mode=False, rust_checker=rust_checker
|
||||
)
|
||||
npc_result = get_name_puzzle_conditions(gen, max_cost=MAX_COST, cost_per_byte=COST_PER_BYTE, safe_mode=False)
|
||||
assert npc_result.error is None
|
||||
assert npc_result.clvm_cost == EXPECTED_COST
|
||||
cond_1 = ConditionWithArgs(ConditionOpcode.CREATE_COIN, [bytes([0] * 31 + [1]), int_to_bytes(500)])
|
||||
|
@ -163,6 +163,7 @@ class TestPoolPuzzles(TestCase):
|
||||
spend_bundle,
|
||||
time,
|
||||
DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
|
||||
DEFAULT_CONSTANTS.COST_PER_BYTE,
|
||||
)
|
||||
# Test that we can retrieve the extra data
|
||||
assert get_delayed_puz_info_from_launcher_spend(launcher_coinsol) == (DELAY_TIME, DELAY_PH)
|
||||
@ -185,6 +186,7 @@ class TestPoolPuzzles(TestCase):
|
||||
SpendBundle([post_launch_coinsol], G2Element()),
|
||||
time,
|
||||
DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
|
||||
DEFAULT_CONSTANTS.COST_PER_BYTE,
|
||||
)
|
||||
|
||||
# HONEST ABSORB
|
||||
@ -220,6 +222,7 @@ class TestPoolPuzzles(TestCase):
|
||||
SpendBundle(coin_sols, G2Element()),
|
||||
time,
|
||||
DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
|
||||
DEFAULT_CONSTANTS.COST_PER_BYTE,
|
||||
)
|
||||
|
||||
# ABSORB A NON EXISTENT REWARD (Negative test)
|
||||
@ -251,6 +254,7 @@ class TestPoolPuzzles(TestCase):
|
||||
SpendBundle([singleton_coinsol], G2Element()),
|
||||
time,
|
||||
DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
|
||||
DEFAULT_CONSTANTS.COST_PER_BYTE,
|
||||
)
|
||||
except BadSpendBundleError as e:
|
||||
assert str(e) == "condition validation failure Err.ASSERT_ANNOUNCE_CONSUMED_FAILED"
|
||||
@ -280,6 +284,7 @@ class TestPoolPuzzles(TestCase):
|
||||
SpendBundle([singleton_coinsol, bad_coinsol], G2Element()),
|
||||
time,
|
||||
DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
|
||||
DEFAULT_CONSTANTS.COST_PER_BYTE,
|
||||
)
|
||||
except BadSpendBundleError as e:
|
||||
assert str(e) == "condition validation failure Err.ASSERT_ANNOUNCE_CONSUMED_FAILED"
|
||||
@ -310,6 +315,7 @@ class TestPoolPuzzles(TestCase):
|
||||
SpendBundle([travel_coinsol], sig),
|
||||
time,
|
||||
DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
|
||||
DEFAULT_CONSTANTS.COST_PER_BYTE,
|
||||
)
|
||||
|
||||
# ESCAPE TOO FAST (Negative test)
|
||||
@ -336,6 +342,7 @@ class TestPoolPuzzles(TestCase):
|
||||
SpendBundle([return_coinsol], sig),
|
||||
time,
|
||||
DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
|
||||
DEFAULT_CONSTANTS.COST_PER_BYTE,
|
||||
)
|
||||
except BadSpendBundleError as e:
|
||||
assert str(e) == "condition validation failure Err.ASSERT_HEIGHT_RELATIVE_FAILED"
|
||||
@ -359,6 +366,7 @@ class TestPoolPuzzles(TestCase):
|
||||
SpendBundle(coin_sols, G2Element()),
|
||||
time,
|
||||
DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
|
||||
DEFAULT_CONSTANTS.COST_PER_BYTE,
|
||||
)
|
||||
|
||||
# LEAVE THE WAITING ROOM
|
||||
@ -394,6 +402,7 @@ class TestPoolPuzzles(TestCase):
|
||||
SpendBundle([return_coinsol], sig),
|
||||
time,
|
||||
DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
|
||||
DEFAULT_CONSTANTS.COST_PER_BYTE,
|
||||
)
|
||||
|
||||
# ABSORB ONCE MORE FOR GOOD MEASURE
|
||||
@ -414,4 +423,5 @@ class TestPoolPuzzles(TestCase):
|
||||
SpendBundle(coin_sols, G2Element()),
|
||||
time,
|
||||
DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
|
||||
DEFAULT_CONSTANTS.COST_PER_BYTE,
|
||||
)
|
||||
|
@ -9,7 +9,7 @@ from chia.util.generator_tools import additions_for_npc
|
||||
|
||||
|
||||
def run_and_get_removals_and_additions(
|
||||
block: FullBlock, max_cost: int, cost_per_byte: int, rust_checker: bool, safe_mode=False
|
||||
block: FullBlock, max_cost: int, cost_per_byte: int, safe_mode=False
|
||||
) -> Tuple[List[bytes32], List[Coin]]:
|
||||
removals: List[bytes32] = []
|
||||
additions: List[Coin] = []
|
||||
@ -24,7 +24,6 @@ def run_and_get_removals_and_additions(
|
||||
max_cost,
|
||||
cost_per_byte=cost_per_byte,
|
||||
safe_mode=safe_mode,
|
||||
rust_checker=rust_checker,
|
||||
)
|
||||
# build removals list
|
||||
for npc in npc_result.npc_list:
|
||||
|
454
tests/wallet/simple_sync/test_simple_sync_protocol.py
Normal file
@ -0,0 +1,454 @@
|
||||
# flake8: noqa: F811, F401
|
||||
import asyncio
|
||||
from typing import List, Optional
|
||||
|
||||
import pytest
|
||||
from colorlog import logging
|
||||
|
||||
from chia.consensus.block_rewards import calculate_pool_reward, calculate_base_farmer_reward
|
||||
from chia.protocols import wallet_protocol
|
||||
from chia.protocols.protocol_message_types import ProtocolMessageTypes
|
||||
from chia.protocols.wallet_protocol import RespondToCoinUpdates, CoinStateUpdate, RespondToPhUpdates
|
||||
from chia.server.outbound_message import NodeType
|
||||
from chia.simulator.simulator_protocol import FarmNewBlockProtocol, ReorgProtocol
|
||||
from chia.types.blockchain_format.coin import Coin
|
||||
from chia.types.coin_record import CoinRecord
|
||||
from chia.types.peer_info import PeerInfo
|
||||
from chia.util.ints import uint16, uint32, uint64
|
||||
from chia.wallet.wallet import Wallet
|
||||
from chia.wallet.wallet_state_manager import WalletStateManager
|
||||
from tests.connection_utils import add_dummy_connection
|
||||
from tests.setup_nodes import self_hostname, setup_simulators_and_wallets
|
||||
from tests.time_out_assert import time_out_assert
|
||||
from tests.wallet.cc_wallet.test_cc_wallet import tx_in_pool
|
||||
|
||||
|
||||
def wallet_height_at_least(wallet_node, h):
|
||||
height = wallet_node.wallet_state_manager.blockchain._peak_height
|
||||
if height == h:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def event_loop():
|
||||
loop = asyncio.get_event_loop()
|
||||
yield loop
|
||||
|
||||
|
||||
class TestSimpleSyncProtocol:
|
||||
@pytest.fixture(scope="function")
|
||||
async def wallet_node_simulator(self):
|
||||
async for _ in setup_simulators_and_wallets(1, 1, {}):
|
||||
yield _
|
||||
|
||||
async def get_all_messages_in_queue(self, queue):
|
||||
all_messages = []
|
||||
await asyncio.sleep(2)
|
||||
while not queue.empty():
|
||||
message, peer = await queue.get()
|
||||
all_messages.append(message)
|
||||
return all_messages
|
||||
|
||||
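get_all_messages_in_queue waits a fixed settling delay so the node has time to push any pending coin_state_update messages, then drains whatever has arrived on the dummy peer's queue. A standalone sketch of the same drain pattern with a plain asyncio.Queue (illustrative only; the real queue yields (message, peer) pairs):

import asyncio
from typing import Any, List

async def drain_after_delay(queue: "asyncio.Queue[Any]", delay: float = 2.0) -> List[Any]:
    await asyncio.sleep(delay)  # allow in-flight updates to land first
    items: List[Any] = []
    while not queue.empty():
        items.append(await queue.get())
    return items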
@pytest.mark.asyncio
|
||||
async def test_subscribe_for_ph(self, wallet_node_simulator):
|
||||
num_blocks = 4
|
||||
full_nodes, wallets = wallet_node_simulator
|
||||
full_node_api = full_nodes[0]
|
||||
wallet_node, server_2 = wallets[0]
|
||||
fn_server = full_node_api.full_node.server
|
||||
wsm: WalletStateManager = wallet_node.wallet_state_manager
|
||||
|
||||
await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
|
||||
incoming_queue, peer_id = await add_dummy_connection(fn_server, 12312, NodeType.WALLET)
|
||||
|
||||
zero_ph = 32 * b"\0"
|
||||
junk_ph = 32 * b"\a"
|
||||
fake_wallet_peer = fn_server.all_connections[peer_id]
|
||||
msg = wallet_protocol.RegisterForPhUpdates([zero_ph], 0)
|
||||
msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
|
||||
|
||||
assert msg_response.type == ProtocolMessageTypes.respond_to_ph_update.value
|
||||
data_response: RespondToPhUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
|
||||
assert data_response.coin_states == []
|
||||
|
||||
# Farm a few more blocks with rewards
|
||||
for i in range(0, num_blocks):
|
||||
if i == num_blocks - 1:
|
||||
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
|
||||
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(junk_ph))
|
||||
else:
|
||||
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
|
||||
|
||||
msg = wallet_protocol.RegisterForPhUpdates([zero_ph], 0)
|
||||
msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
|
||||
assert msg_response.type == ProtocolMessageTypes.respond_to_ph_update.value
|
||||
data_response: RespondToPhUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
|
||||
assert len(data_response.coin_states) == 2 * num_blocks # 2 per height farmer / pool reward
|
||||
|
||||
# Farm more rewards to check the incoming queue for the updates
|
||||
for i in range(0, num_blocks):
|
||||
if i == num_blocks - 1:
|
||||
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
|
||||
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(junk_ph))
|
||||
else:
|
||||
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
|
||||
|
||||
all_messages = await self.get_all_messages_in_queue(incoming_queue)
|
||||
|
||||
zero_coin = await full_node_api.full_node.coin_store.get_coin_states_by_puzzle_hashes(True, [zero_ph])
|
||||
all_zero_coin = set(zero_coin)
|
||||
notified_zero_coins = set()
|
||||
|
||||
for message in all_messages:
|
||||
if message.type == ProtocolMessageTypes.coin_state_update.value:
|
||||
data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
|
||||
for coin_state in data_response.items:
|
||||
notified_zero_coins.add(coin_state)
|
||||
assert len(data_response.items) == 2 # 2 per height farmer / pool reward
|
||||
|
||||
assert all_zero_coin == notified_zero_coins
|
||||
|
||||
# Test subscribing to more coins
|
||||
one_ph = 32 * b"\1"
|
||||
msg = wallet_protocol.RegisterForPhUpdates([one_ph], 0)
|
||||
msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
|
||||
peak = full_node_api.full_node.blockchain.get_peak()
|
||||
|
||||
for i in range(0, num_blocks):
|
||||
if i == num_blocks - 1:
|
||||
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
|
||||
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(junk_ph))
|
||||
else:
|
||||
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
|
||||
|
||||
for i in range(0, num_blocks):
|
||||
if i == num_blocks - 1:
|
||||
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(one_ph))
|
||||
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(junk_ph))
|
||||
else:
|
||||
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(one_ph))
|
||||
|
||||
zero_coins = await full_node_api.full_node.coin_store.get_coin_states_by_puzzle_hashes(
|
||||
True, [zero_ph], peak.height + 1
|
||||
)
|
||||
one_coins = await full_node_api.full_node.coin_store.get_coin_states_by_puzzle_hashes(True, [one_ph])
|
||||
|
||||
all_coins = set(zero_coins)
|
||||
all_coins.update(one_coins)
|
||||
|
||||
all_messages = await self.get_all_messages_in_queue(incoming_queue)
|
||||
|
||||
notified_all_coins = set()
|
||||
|
||||
for message in all_messages:
|
||||
if message.type == ProtocolMessageTypes.coin_state_update.value:
|
||||
data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
|
||||
for coin_state in data_response.items:
|
||||
notified_all_coins.add(coin_state)
|
||||
assert len(data_response.items) == 2 # 2 per height farmer / pool reward
|
||||
|
||||
assert all_coins == notified_all_coins
|
||||
|
||||
wsm: WalletStateManager = wallet_node.wallet_state_manager
|
||||
wallet: Wallet = wsm.wallets[1]
|
||||
puzzle_hash = await wallet.get_new_puzzlehash()
|
||||
|
||||
for i in range(0, num_blocks):
|
||||
if i == num_blocks - 1:
|
||||
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
|
||||
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(junk_ph))
|
||||
else:
|
||||
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
|
||||
|
||||
funds = sum(
|
||||
[calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks)]
|
||||
)
|
||||
|
||||
await time_out_assert(15, wallet.get_confirmed_balance, funds)
|
||||
|
||||
msg_1 = wallet_protocol.RegisterForPhUpdates([puzzle_hash], 0)
|
||||
msg_response_1 = await full_node_api.register_interest_in_puzzle_hash(msg_1, fake_wallet_peer)
|
||||
assert msg_response_1.type == ProtocolMessageTypes.respond_to_ph_update.value
|
||||
data_response_1: RespondToPhUpdates = RespondToCoinUpdates.from_bytes(msg_response_1.data)
|
||||
assert len(data_response_1.coin_states) == 2 * num_blocks # 2 per height farmer / pool reward
|
||||
|
||||
tx_record = await wallet.generate_signed_transaction(uint64(10), puzzle_hash, uint64(0))
|
||||
assert len(tx_record.spend_bundle.removals()) == 1
|
||||
spent_coin = tx_record.spend_bundle.removals()[0]
|
||||
assert spent_coin.puzzle_hash == puzzle_hash
|
||||
|
||||
await wallet.push_transaction(tx_record)
|
||||
|
||||
await time_out_assert(
|
||||
15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
|
||||
)
|
||||
|
||||
for i in range(0, num_blocks):
|
||||
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
|
||||
|
||||
all_messages = await self.get_all_messages_in_queue(incoming_queue)
|
||||
|
||||
notified_state = None
|
||||
|
||||
for message in all_messages:
|
||||
if message.type == ProtocolMessageTypes.coin_state_update.value:
|
||||
data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
|
||||
for coin_state in data_response.items:
|
||||
if coin_state.coin.name() == spent_coin.name():
|
||||
notified_state = coin_state
|
||||
|
||||
assert notified_state is not None
|
||||
assert notified_state.coin == spent_coin
|
||||
assert notified_state.spent_height is not None
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_subscribe_for_coin_id(self, wallet_node_simulator):
|
||||
num_blocks = 4
|
||||
full_nodes, wallets = wallet_node_simulator
|
||||
full_node_api = full_nodes[0]
|
||||
wallet_node, server_2 = wallets[0]
|
||||
fn_server = full_node_api.full_node.server
|
||||
wsm: WalletStateManager = wallet_node.wallet_state_manager
|
||||
standard_wallet: Wallet = wsm.wallets[1]
|
||||
puzzle_hash = await standard_wallet.get_new_puzzlehash()
|
||||
|
||||
await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
|
||||
incoming_queue, peer_id = await add_dummy_connection(fn_server, 12312, NodeType.WALLET)
|
||||
|
||||
fake_wallet_peer = fn_server.all_connections[peer_id]
|
||||
|
||||
# Farm to create a coin that we'll track
|
||||
for i in range(0, num_blocks):
|
||||
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
|
||||
|
||||
funds = sum(
|
||||
[calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks)]
|
||||
)
|
||||
|
||||
await time_out_assert(15, standard_wallet.get_confirmed_balance, funds)
|
||||
|
||||
my_coins: List[CoinRecord] = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(
|
||||
True, puzzle_hash
|
||||
)
|
||||
coin_to_spend = my_coins[0].coin
|
||||
|
||||
msg = wallet_protocol.RegisterForCoinUpdates([coin_to_spend.name()], 0)
|
||||
msg_response = await full_node_api.register_interest_in_coin(msg, fake_wallet_peer)
|
||||
assert msg_response is not None
|
||||
assert msg_response.type == ProtocolMessageTypes.respond_to_coin_update.value
|
||||
data_response: RespondToCoinUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
|
||||
assert data_response.coin_states[0].coin == coin_to_spend
|
||||

        coins = set()
        coins.add(coin_to_spend)
        tx_record = await standard_wallet.generate_signed_transaction(uint64(10), puzzle_hash, uint64(0), coins=coins)
        await standard_wallet.push_transaction(tx_record)

        await time_out_assert(
            15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
        )

        # Farm transaction
        for i in range(0, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))

        all_messages = await self.get_all_messages_in_queue(incoming_queue)

        notified_coins = set()
        for message in all_messages:
            if message.type == ProtocolMessageTypes.coin_state_update.value:
                data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
                for coin_state in data_response.items:
                    notified_coins.add(coin_state.coin)
                    assert coin_state.spent_height is not None

        assert notified_coins == coins

        # Test getting notification for coin that is about to be created
        tx_record = await standard_wallet.generate_signed_transaction(uint64(10), puzzle_hash, uint64(0))

        tx_record.spend_bundle.additions()

        added_target: Optional[Coin] = None
        for coin in tx_record.spend_bundle.additions():
            if coin.puzzle_hash == puzzle_hash:
                added_target = coin

        assert added_target is not None

        msg = wallet_protocol.RegisterForCoinUpdates([added_target.name()], 0)
        msg_response = await full_node_api.register_interest_in_coin(msg, fake_wallet_peer)
        assert msg_response is not None
        assert msg_response.type == ProtocolMessageTypes.respond_to_coin_update.value
        data_response: RespondToCoinUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
        assert len(data_response.coin_states) == 0

        await standard_wallet.push_transaction(tx_record)

        await time_out_assert(
            15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
        )

        for i in range(0, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))

        all_messages = await self.get_all_messages_in_queue(incoming_queue)

        notified_state = None

        for message in all_messages:
            if message.type == ProtocolMessageTypes.coin_state_update.value:
                data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
                for coin_state in data_response.items:
                    if coin_state.coin.name() == added_target.name():
                        notified_state = coin_state

        assert notified_state is not None
        assert notified_state.coin == added_target
        assert notified_state.spent_height is None

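        # Note that registering added_target before it existed returned an empty
        # coin_states list; the coin_state_update for it only arrived once the
        # transaction was farmed, and at that point the coin was still unspent.
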
    @pytest.mark.asyncio
    async def test_subscribe_for_ph_reorg(self, wallet_node_simulator):
        num_blocks = 4
        long_blocks = 20
        full_nodes, wallets = wallet_node_simulator
        full_node_api = full_nodes[0]
        wallet_node, server_2 = wallets[0]
        fn_server = full_node_api.full_node.server
        wsm: WalletStateManager = wallet_node.wallet_state_manager
        standard_wallet: Wallet = wsm.wallets[1]
        puzzle_hash = await standard_wallet.get_new_puzzlehash()

        await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
        incoming_queue, peer_id = await add_dummy_connection(fn_server, 12312, NodeType.WALLET)

        fake_wallet_peer = fn_server.all_connections[peer_id]
        zero_ph = 32 * b"\0"

        # Farm some blocks to zero_ph before creating the coins we'll track
        for i in range(0, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))

        for i in range(0, long_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))

        msg = wallet_protocol.RegisterForPhUpdates([puzzle_hash], 0)
        msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
        assert msg_response is not None
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))

        for i in range(0, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))

        expected_height = uint32(long_blocks + 2 * num_blocks + 1)
        await time_out_assert(15, full_node_api.full_node.blockchain.get_peak_height, expected_height)
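        # Height check: num_blocks to zero_ph, long_blocks to zero_ph, one block paying
        # puzzle_hash, then num_blocks more to zero_ph, i.e. 4 + 20 + 1 + 4 = 29,
        # which is long_blocks + 2 * num_blocks + 1.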

        coin_records = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(True, puzzle_hash)
        assert len(coin_records) > 0
        fork_height = expected_height - num_blocks - 5
        req = ReorgProtocol(fork_height, expected_height + 5, zero_ph)
        await full_node_api.reorg_from_index_to_new_index(req)

        coin_records = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(True, puzzle_hash)
        assert coin_records == []

        all_messages = await self.get_all_messages_in_queue(incoming_queue)

        coin_update_messages = []
        for message in all_messages:
            if message.type == ProtocolMessageTypes.coin_state_update.value:
                data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
                coin_update_messages.append(data_response)

        # The first update is the coin creation, the second one is the reorg
        assert len(coin_update_messages) == 2
        first = coin_update_messages[0]

        assert len(first.items) == 2
        first_state_coin_1 = first.items[0]
        assert first_state_coin_1.spent_height is None
        assert first_state_coin_1.created_height is not None
        first_state_coin_2 = first.items[1]
        assert first_state_coin_2.spent_height is None
        assert first_state_coin_2.created_height is not None

        second = coin_update_messages[1]
        assert second.fork_height == fork_height
        assert len(second.items) == 2
        second_state_coin_1 = second.items[0]
        assert second_state_coin_1.spent_height is None
        assert second_state_coin_1.created_height is None
        second_state_coin_2 = second.items[1]
        assert second_state_coin_2.spent_height is None
        assert second_state_coin_2.created_height is None

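        # Why the second update rolls the coins back: fork_height = 29 - 4 - 5 = 20, while
        # the block paying puzzle_hash was farmed at height 25. The reorg rewinds to 20 and
        # rebuilds on zero_ph, so those two reward coins vanish and are re-reported with
        # created_height None.
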
    @pytest.mark.asyncio
    async def test_subscribe_for_coin_id_reorg(self, wallet_node_simulator):
        num_blocks = 4
        long_blocks = 20
        full_nodes, wallets = wallet_node_simulator
        full_node_api = full_nodes[0]
        wallet_node, server_2 = wallets[0]
        fn_server = full_node_api.full_node.server
        wsm: WalletStateManager = wallet_node.wallet_state_manager
        standard_wallet: Wallet = wsm.wallets[1]
        puzzle_hash = await standard_wallet.get_new_puzzlehash()

        await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
        incoming_queue, peer_id = await add_dummy_connection(fn_server, 12312, NodeType.WALLET)

        fake_wallet_peer = fn_server.all_connections[peer_id]
        zero_ph = 32 * b"\0"

        # Farm some blocks to zero_ph before creating the coins we'll track
        for i in range(0, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))

        for i in range(0, long_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))

        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))

        for i in range(0, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))

        expected_height = uint32(long_blocks + 2 * num_blocks + 1)
        await time_out_assert(15, full_node_api.full_node.blockchain.get_peak_height, expected_height)

        coin_records = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(True, puzzle_hash)
        assert len(coin_records) > 0

        for coin_rec in coin_records:
            msg = wallet_protocol.RegisterForCoinUpdates([coin_rec.name], 0)
            msg_response = await full_node_api.register_interest_in_coin(msg, fake_wallet_peer)
            assert msg_response is not None

        fork_height = expected_height - num_blocks - 5
        req = ReorgProtocol(fork_height, expected_height + 5, zero_ph)
        await full_node_api.reorg_from_index_to_new_index(req)

        coin_records = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(True, puzzle_hash)
        assert coin_records == []

        all_messages = await self.get_all_messages_in_queue(incoming_queue)

        coin_update_messages = []
        for message in all_messages:
            if message.type == ProtocolMessageTypes.coin_state_update.value:
                data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
                coin_update_messages.append(data_response)

        assert len(coin_update_messages) == 1
        update = coin_update_messages[0]
        coin_states = update.items
        assert len(coin_states) == 2
        first_coin = coin_states[0]
        assert first_coin.spent_height is None
        assert first_coin.created_height is None
        second_coin = coin_states[1]
        assert second_coin.spent_height is None
        assert second_coin.created_height is None
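
# In the coin-id variant above, the registrations happen after the coins already exist, so
# the only pushed update is the reorg itself: both registered reward coins come back with
# created_height None. Taken together, these tests show the full lifecycle a light client
# sees: a snapshot at registration, pushes on creation and spend, and roll-backs after a
# reorg. A minimal, hypothetical consumer of these messages might look like the sketch
# below; the class name and plain-dict storage are assumptions, and only the attribute
# names exercised above (items, fork_height, coin, created_height, spent_height,
# coin.name()) are relied on.
class CoinSubscriptionTracker:
    def __init__(self):
        # coin id -> the latest CoinState-like object reported for that coin
        self.states = {}

    def apply_update(self, update):
        # `update` is a CoinStateUpdate-like object with .fork_height and .items.
        for coin_state in update.items:
            coin_id = coin_state.coin.name()
            if coin_state.created_height is None and coin_state.spent_height is None:
                # A reorg rolled back the coin's creation; forget it.
                self.states.pop(coin_id, None)
            else:
                self.states[coin_id] = coin_state

    def unspent(self):
        # Coins we currently believe exist and are unspent.
        return [state for state in self.states.values() if state.spent_height is None]
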
@@ -33,6 +33,7 @@ ANYONE_CAN_SPEND_WITH_PADDING_PUZZLE_HASH = Program.to(binutils.assemble("(a (q
POOL_REWARD_PREFIX_MAINNET = bytes32.fromhex("ccd5bb71183532bff220ba46c268991a00000000000000000000000000000000")

MAX_BLOCK_COST_CLVM = int(1e18)
+COST_PER_BYTE = int(12000)


class PuzzleDB:
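
# A minimal sketch (hypothetical helper, not part of the diff) of how a per-byte charge
# combines with execution cost: update_coin_store_for_spend_bundle now also receives
# COST_PER_BYTE (see the hunks below), presumably so the cost check can account for the
# serialized size of the spend bundle as well as its CLVM execution cost. The function
# name and signature here are assumptions.
def total_cost(clvm_cost: int, serialized_size: int, cost_per_byte: int = COST_PER_BYTE) -> int:
    # Byte-size cost is added on top of the program execution cost; the sum is what would
    # be compared against a limit such as MAX_BLOCK_COST_CLVM.
    return clvm_cost + serialized_size * cost_per_byte
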
@@ -417,7 +418,9 @@ def spend_coin_to_singleton(
    coin_spend = CoinSpend(farmed_coin, ANYONE_CAN_SPEND_PUZZLE, conditions)
    spend_bundle = SpendBundle.aggregate([launcher_spend_bundle, SpendBundle([coin_spend], G2Element())])

-    additions, removals = coin_store.update_coin_store_for_spend_bundle(spend_bundle, now, MAX_BLOCK_COST_CLVM)
+    additions, removals = coin_store.update_coin_store_for_spend_bundle(
+        spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
+    )

    launcher_coin = launcher_spend_bundle.coin_spends[0].coin

@@ -519,7 +522,9 @@ def test_lifecycle_with_coinstore_as_wallet():
    coin_spend = SINGLETON_WALLET.coin_spend_for_conditions(PUZZLE_DB, conditions=singleton_conditions)
    spend_bundle = SpendBundle([coin_spend, p2_singleton_coin_spend], G2Element())

-    additions, removals = coin_store.update_coin_store_for_spend_bundle(spend_bundle, now, MAX_BLOCK_COST_CLVM)
+    additions, removals = coin_store.update_coin_store_for_spend_bundle(
+        spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
+    )
    now.seconds += 500
    now.height += 1

@@ -548,7 +553,9 @@ def test_lifecycle_with_coinstore_as_wallet():
    coin_spend = SINGLETON_WALLET.coin_spend_for_conditions(PUZZLE_DB, conditions=singleton_conditions)
    spend_bundle = SpendBundle([coin_spend, p2_singleton_coin_spend], G2Element())

-    additions, removals = coin_store.update_coin_store_for_spend_bundle(spend_bundle, now, MAX_BLOCK_COST_CLVM)
+    additions, removals = coin_store.update_coin_store_for_spend_bundle(
+        spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
+    )
    now.seconds += 500
    now.height += 1

@@ -599,7 +606,9 @@ def test_lifecycle_with_coinstore_as_wallet():

    spend_bundle = SpendBundle([singleton_coin_spend], G2Element())

-    additions, removals = coin_store.update_coin_store_for_spend_bundle(spend_bundle, now, MAX_BLOCK_COST_CLVM)
+    additions, removals = coin_store.update_coin_store_for_spend_bundle(
+        spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
+    )

    assert len(list(coin_store.all_unspent_coins())) == 1

@@ -635,7 +644,9 @@ def test_lifecycle_with_coinstore_as_wallet():
    spend_bundle = SpendBundle([coin_spend, p2_singleton_coin_spend], G2Element())
    spend_bundle.debug()

-    additions, removals = coin_store.update_coin_store_for_spend_bundle(spend_bundle, now, MAX_BLOCK_COST_CLVM)
+    additions, removals = coin_store.update_coin_store_for_spend_bundle(
+        spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
+    )
    now.seconds += 500
    now.height += 1

@@ -651,7 +662,9 @@ def test_lifecycle_with_coinstore_as_wallet():
    )
    spend_bundle = SpendBundle([coin_spend], G2Element())

-    additions, removals = coin_store.update_coin_store_for_spend_bundle(spend_bundle, now, MAX_BLOCK_COST_CLVM)
+    additions, removals = coin_store.update_coin_store_for_spend_bundle(
+        spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
+    )
    now.seconds += 500
    now.height += 1
    change_count = SINGLETON_WALLET.update_state(PUZZLE_DB, removals)
@@ -688,7 +701,9 @@ def test_lifecycle_with_coinstore_as_wallet():
    )
    spend_bundle = SpendBundle([coin_spend, p2_singleton_coin_spend], G2Element())

-    additions, removals = coin_store.update_coin_store_for_spend_bundle(spend_bundle, now, MAX_BLOCK_COST_CLVM)
+    additions, removals = coin_store.update_coin_store_for_spend_bundle(
+        spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
+    )
    now.seconds += 500
    now.height += 1

@@ -716,7 +731,9 @@ def test_lifecycle_with_coinstore_as_wallet():
    PUZZLE_DB.add_puzzle(full_puzzle)

    try:
-        additions, removals = coin_store.update_coin_store_for_spend_bundle(spend_bundle, now, MAX_BLOCK_COST_CLVM)
+        additions, removals = coin_store.update_coin_store_for_spend_bundle(
+            spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
+        )
        assert 0
    except BadSpendBundleError as ex:
        assert ex.args[0] == "condition validation failure Err.ASSERT_HEIGHT_RELATIVE_FAILED"
@@ -724,7 +741,9 @@ def test_lifecycle_with_coinstore_as_wallet():
    now.seconds += 350000
    now.height += 1445

-    additions, removals = coin_store.update_coin_store_for_spend_bundle(spend_bundle, now, MAX_BLOCK_COST_CLVM)
+    additions, removals = coin_store.update_coin_store_for_spend_bundle(
+        spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
+    )

    SINGLETON_WALLET.update_state(PUZZLE_DB, removals)

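# The two hunks above exercise the time-lock path: the first attempt is rejected with
# ASSERT_HEIGHT_RELATIVE_FAILED while the relative-height condition is unmet, and after
# advancing the simulated clock (now.seconds += 350000, now.height += 1445) the same
# spend bundle goes through the updated four-argument call without raising.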
@@ -739,7 +758,9 @@ def test_lifecycle_with_coinstore_as_wallet():
    spend_bundle = SpendBundle([coin_spend], G2Element())
    spend_bundle.debug()

-    additions, removals = coin_store.update_coin_store_for_spend_bundle(spend_bundle, now, MAX_BLOCK_COST_CLVM)
+    additions, removals = coin_store.update_coin_store_for_spend_bundle(
+        spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
+    )
    update_count = SINGLETON_WALLET.update_state(PUZZLE_DB, removals)

    assert update_count == 0