mirror of https://github.com/Chia-Network/chia-blockchain.git
synced 2024-11-11 01:28:17 +03:00

merged: this commit is contained in commit c2aed75a20
.github/workflows/build-test-macos-clvm.yml (vendored)
@@ -81,7 +81,7 @@ jobs:
       - name: Test clvm code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/clvm/test_chialisp_deserialization.py tests/clvm/test_clvm_compilation.py tests/clvm/test_program.py tests/clvm/test_puzzle_compression.py tests/clvm/test_puzzles.py tests/clvm/test_serialized_program.py tests/clvm/test_singletons.py tests/clvm/test_spend_sim.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/clvm/test_chialisp_deserialization.py tests/clvm/test_clvm_compilation.py tests/clvm/test_clvm_step.py tests/clvm/test_program.py tests/clvm/test_puzzle_compression.py tests/clvm/test_puzzles.py tests/clvm/test_serialized_program.py tests/clvm/test_singletons.py tests/clvm/test_spend_sim.py

       - name: Process coverage data
         run: |
@@ -95,7 +95,7 @@ jobs:
       - name: Test core-util code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/core/util/test_cached_bls.py tests/core/util/test_config.py tests/core/util/test_db_wrapper.py tests/core/util/test_file_keyring_synchronization.py tests/core/util/test_files.py tests/core/util/test_keychain.py tests/core/util/test_keyring_wrapper.py tests/core/util/test_lru_cache.py tests/core/util/test_significant_bits.py tests/core/util/test_streamable.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/core/util/test_cached_bls.py tests/core/util/test_config.py tests/core/util/test_db_wrapper.py tests/core/util/test_file_keyring_synchronization.py tests/core/util/test_files.py tests/core/util/test_jsonify.py tests/core/util/test_keychain.py tests/core/util/test_keyring_wrapper.py tests/core/util/test_lru_cache.py tests/core/util/test_significant_bits.py tests/core/util/test_streamable.py

       - name: Process coverage data
         run: |
.github/workflows/build-test-macos-util.yml (vendored)
@@ -81,7 +81,7 @@ jobs:
       - name: Test util code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/util/test_chunks.py tests/util/test_full_block_utils.py tests/util/test_lock_queue.py tests/util/test_network_protocol_files.py tests/util/test_paginator.py tests/util/test_struct_stream.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/util/test_chunks.py tests/util/test_full_block_utils.py tests/util/test_lock_queue.py tests/util/test_misc.py tests/util/test_network.py tests/util/test_network_protocol_files.py tests/util/test_paginator.py tests/util/test_struct_stream.py

       - name: Process coverage data
         run: |
@@ -95,7 +95,7 @@ jobs:
       - name: Test wallet-nft_wallet code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" tests/wallet/nft_wallet/test_nft_clvm.py tests/wallet/nft_wallet/test_nft_wallet.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" tests/wallet/nft_wallet/test_nft_clvm.py tests/wallet/nft_wallet/test_nft_puzzles.py tests/wallet/nft_wallet/test_nft_wallet.py

       - name: Process coverage data
         run: |
@@ -95,7 +95,7 @@ jobs:
       - name: Test wallet code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/wallet/test_bech32m.py tests/wallet/test_chialisp.py tests/wallet/test_puzzle_store.py tests/wallet/test_singleton.py tests/wallet/test_singleton_lifecycle.py tests/wallet/test_singleton_lifecycle_fast.py tests/wallet/test_taproot.py tests/wallet/test_wallet.py tests/wallet/test_wallet_blockchain.py tests/wallet/test_wallet_interested_store.py tests/wallet/test_wallet_key_val_store.py tests/wallet/test_wallet_store.py tests/wallet/test_wallet_user_store.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/wallet/test_bech32m.py tests/wallet/test_chialisp.py tests/wallet/test_coin_selection.py tests/wallet/test_puzzle_store.py tests/wallet/test_singleton.py tests/wallet/test_singleton_lifecycle.py tests/wallet/test_singleton_lifecycle_fast.py tests/wallet/test_taproot.py tests/wallet/test_wallet.py tests/wallet/test_wallet_blockchain.py tests/wallet/test_wallet_interested_store.py tests/wallet/test_wallet_key_val_store.py tests/wallet/test_wallet_retry.py tests/wallet/test_wallet_store.py tests/wallet/test_wallet_user_store.py

       - name: Process coverage data
         run: |
@@ -94,7 +94,7 @@ jobs:
       - name: Test blockchain code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/blockchain/test_blockchain.py tests/blockchain/test_blockchain_transactions.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/blockchain/test_blockchain.py tests/blockchain/test_blockchain_transactions.py

       - name: Process coverage data
         run: |
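Note on the flag removed above (and added back as "-p monitor" in two hunks below): pytest's -p option controls plugin loading at startup, and the "no:" prefix blocks the named plugin. A minimal sketch of the two CI behaviours, assuming pytest is installed; "tests/blockchain" stands in for the real test paths:

    # old CI invocation: the plugin registered as "monitor" is explicitly disabled
    import pytest

    pytest.main(["-p", "no:monitor", "-m", "not benchmark", "tests/blockchain"])

    # new CI invocation: no -p flag, so plugin resolution is left at pytest's defaults
    pytest.main(["-m", "not benchmark", "tests/blockchain"])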
.github/workflows/build-test-ubuntu-clvm.yml (vendored)
@@ -80,7 +80,7 @@ jobs:
       - name: Test clvm code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/clvm/test_chialisp_deserialization.py tests/clvm/test_clvm_compilation.py tests/clvm/test_program.py tests/clvm/test_puzzle_compression.py tests/clvm/test_puzzles.py tests/clvm/test_serialized_program.py tests/clvm/test_singletons.py tests/clvm/test_spend_sim.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/clvm/test_chialisp_deserialization.py tests/clvm/test_clvm_compilation.py tests/clvm/test_clvm_step.py tests/clvm/test_program.py tests/clvm/test_puzzle_compression.py tests/clvm/test_puzzles.py tests/clvm/test_serialized_program.py tests/clvm/test_singletons.py tests/clvm/test_spend_sim.py

       - name: Process coverage data
         run: |
@@ -80,7 +80,7 @@ jobs:
       - name: Test core-cmds code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/core/cmds/test_keys.py tests/core/cmds/test_wallet.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/core/cmds/test_keys.py tests/core/cmds/test_wallet.py

       - name: Process coverage data
         run: |
@@ -80,7 +80,7 @@ jobs:
       - name: Test core-consensus code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/core/consensus/test_pot_iterations.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/core/consensus/test_pot_iterations.py

       - name: Process coverage data
         run: |
@@ -80,7 +80,7 @@ jobs:
       - name: Test core-custom_types code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/core/custom_types/test_coin.py tests/core/custom_types/test_proof_of_space.py tests/core/custom_types/test_spend_bundle.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/core/custom_types/test_coin.py tests/core/custom_types/test_proof_of_space.py tests/core/custom_types/test_spend_bundle.py

       - name: Process coverage data
         run: |
@@ -98,7 +98,7 @@ jobs:
       - name: Test core-daemon code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" -p no:monitor tests/core/daemon/test_daemon.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" tests/core/daemon/test_daemon.py

       - name: Process coverage data
         run: |
@@ -94,7 +94,7 @@ jobs:
       - name: Test core-full_node-full_sync code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/core/full_node/full_sync/test_full_sync.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/core/full_node/full_sync/test_full_sync.py

       - name: Process coverage data
         run: |
@@ -94,7 +94,7 @@ jobs:
       - name: Test core-full_node-stores code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/core/full_node/stores/test_block_store.py tests/core/full_node/stores/test_coin_store.py tests/core/full_node/stores/test_full_node_store.py tests/core/full_node/stores/test_hint_store.py tests/core/full_node/stores/test_sync_store.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p monitor tests/core/full_node/stores/test_block_store.py tests/core/full_node/stores/test_coin_store.py tests/core/full_node/stores/test_full_node_store.py tests/core/full_node/stores/test_hint_store.py tests/core/full_node/stores/test_sync_store.py

       - name: Process coverage data
         run: |
@@ -94,7 +94,7 @@ jobs:
       - name: Test core-full_node code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/core/full_node/test_address_manager.py tests/core/full_node/test_block_height_map.py tests/core/full_node/test_conditions.py tests/core/full_node/test_full_node.py tests/core/full_node/test_generator_tools.py tests/core/full_node/test_hint_management.py tests/core/full_node/test_mempool.py tests/core/full_node/test_mempool_performance.py tests/core/full_node/test_node_load.py tests/core/full_node/test_peer_store_resolver.py tests/core/full_node/test_performance.py tests/core/full_node/test_transactions.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p monitor tests/core/full_node/test_address_manager.py tests/core/full_node/test_block_height_map.py tests/core/full_node/test_conditions.py tests/core/full_node/test_full_node.py tests/core/full_node/test_generator_tools.py tests/core/full_node/test_hint_management.py tests/core/full_node/test_mempool.py tests/core/full_node/test_mempool_performance.py tests/core/full_node/test_node_load.py tests/core/full_node/test_peer_store_resolver.py tests/core/full_node/test_performance.py tests/core/full_node/test_transactions.py

       - name: Process coverage data
         run: |
@@ -94,7 +94,7 @@ jobs:
       - name: Test core-server code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" -p no:monitor tests/core/server/test_dos.py tests/core/server/test_rate_limits.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" tests/core/server/test_dos.py tests/core/server/test_rate_limits.py

       - name: Process coverage data
         run: |
@@ -94,7 +94,7 @@ jobs:
       - name: Test core-ssl code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/core/ssl/test_ssl.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/core/ssl/test_ssl.py

       - name: Process coverage data
         run: |
@@ -94,7 +94,7 @@ jobs:
       - name: Test core-util code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/core/util/test_cached_bls.py tests/core/util/test_config.py tests/core/util/test_db_wrapper.py tests/core/util/test_file_keyring_synchronization.py tests/core/util/test_files.py tests/core/util/test_keychain.py tests/core/util/test_keyring_wrapper.py tests/core/util/test_lru_cache.py tests/core/util/test_significant_bits.py tests/core/util/test_streamable.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/core/util/test_cached_bls.py tests/core/util/test_config.py tests/core/util/test_db_wrapper.py tests/core/util/test_file_keyring_synchronization.py tests/core/util/test_files.py tests/core/util/test_jsonify.py tests/core/util/test_keychain.py tests/core/util/test_keyring_wrapper.py tests/core/util/test_lru_cache.py tests/core/util/test_significant_bits.py tests/core/util/test_streamable.py

       - name: Process coverage data
         run: |
.github/workflows/build-test-ubuntu-core.yml (vendored)
@@ -94,7 +94,7 @@ jobs:
       - name: Test core code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/core/test_coins.py tests/core/test_cost_calculation.py tests/core/test_crawler_rpc.py tests/core/test_daemon_rpc.py tests/core/test_db_conversion.py tests/core/test_db_validation.py tests/core/test_farmer_harvester_rpc.py tests/core/test_filter.py tests/core/test_full_node_rpc.py tests/core/test_merkle_set.py tests/core/test_setproctitle.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/core/test_coins.py tests/core/test_cost_calculation.py tests/core/test_crawler_rpc.py tests/core/test_daemon_rpc.py tests/core/test_db_conversion.py tests/core/test_db_validation.py tests/core/test_farmer_harvester_rpc.py tests/core/test_filter.py tests/core/test_full_node_rpc.py tests/core/test_merkle_set.py tests/core/test_setproctitle.py

       - name: Process coverage data
         run: |
@@ -94,7 +94,7 @@ jobs:
       - name: Test farmer_harvester code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" -p no:monitor tests/farmer_harvester/test_farmer_harvester.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" tests/farmer_harvester/test_farmer_harvester.py

       - name: Process coverage data
         run: |
@@ -80,7 +80,7 @@ jobs:
       - name: Test generator code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/generator/test_compression.py tests/generator/test_generator_types.py tests/generator/test_list_to_batches.py tests/generator/test_rom.py tests/generator/test_scan.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/generator/test_compression.py tests/generator/test_generator_types.py tests/generator/test_list_to_batches.py tests/generator/test_rom.py tests/generator/test_scan.py

       - name: Process coverage data
         run: |
@@ -94,7 +94,7 @@ jobs:
       - name: Test plot_sync code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/plot_sync/test_delta.py tests/plot_sync/test_plot_sync.py tests/plot_sync/test_receiver.py tests/plot_sync/test_sender.py tests/plot_sync/test_sync_simulated.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/plot_sync/test_delta.py tests/plot_sync/test_plot_sync.py tests/plot_sync/test_receiver.py tests/plot_sync/test_sender.py tests/plot_sync/test_sync_simulated.py

       - name: Process coverage data
         run: |
@@ -94,7 +94,7 @@ jobs:
       - name: Test plotting code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/plotting/test_plot_manager.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/plotting/test_plot_manager.py

       - name: Process coverage data
         run: |
@@ -94,7 +94,7 @@ jobs:
       - name: Test pools code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 2 -m "not benchmark" -p no:monitor tests/pools/test_pool_cmdline.py tests/pools/test_pool_config.py tests/pools/test_pool_puzzles_lifecycle.py tests/pools/test_pool_rpc.py tests/pools/test_pool_wallet.py tests/pools/test_wallet_pool_store.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 2 -m "not benchmark" tests/pools/test_pool_cmdline.py tests/pools/test_pool_config.py tests/pools/test_pool_puzzles_lifecycle.py tests/pools/test_pool_rpc.py tests/pools/test_pool_wallet.py tests/pools/test_wallet_pool_store.py

       - name: Process coverage data
         run: |
@@ -98,7 +98,7 @@ jobs:
       - name: Test simulation code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" -p no:monitor tests/simulation/test_simulation.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" tests/simulation/test_simulation.py

       - name: Process coverage data
         run: |
@@ -80,7 +80,7 @@ jobs:
       - name: Test tools code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/tools/test_full_sync.py tests/tools/test_run_block.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/tools/test_full_sync.py tests/tools/test_run_block.py

       - name: Process coverage data
         run: |
.github/workflows/build-test-ubuntu-util.yml (vendored)
@@ -80,7 +80,7 @@ jobs:
       - name: Test util code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/util/test_chunks.py tests/util/test_full_block_utils.py tests/util/test_lock_queue.py tests/util/test_network_protocol_files.py tests/util/test_paginator.py tests/util/test_struct_stream.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/util/test_chunks.py tests/util/test_full_block_utils.py tests/util/test_lock_queue.py tests/util/test_misc.py tests/util/test_network.py tests/util/test_network_protocol_files.py tests/util/test_paginator.py tests/util/test_struct_stream.py

       - name: Process coverage data
         run: |
@@ -94,7 +94,7 @@ jobs:
       - name: Test wallet-cat_wallet code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" -p no:monitor tests/wallet/cat_wallet/test_cat_lifecycle.py tests/wallet/cat_wallet/test_cat_wallet.py tests/wallet/cat_wallet/test_offer_lifecycle.py tests/wallet/cat_wallet/test_trades.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" tests/wallet/cat_wallet/test_cat_lifecycle.py tests/wallet/cat_wallet/test_cat_wallet.py tests/wallet/cat_wallet/test_offer_lifecycle.py tests/wallet/cat_wallet/test_trades.py

       - name: Process coverage data
         run: |
@@ -94,7 +94,7 @@ jobs:
       - name: Test wallet-did_wallet code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" -p no:monitor tests/wallet/did_wallet/test_did.py tests/wallet/did_wallet/test_did_rpc.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" tests/wallet/did_wallet/test_did.py tests/wallet/did_wallet/test_did_rpc.py

       - name: Process coverage data
         run: |
@@ -94,7 +94,7 @@ jobs:
       - name: Test wallet-nft_wallet code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" -p no:monitor tests/wallet/nft_wallet/test_nft_clvm.py tests/wallet/nft_wallet/test_nft_wallet.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" tests/wallet/nft_wallet/test_nft_clvm.py tests/wallet/nft_wallet/test_nft_puzzles.py tests/wallet/nft_wallet/test_nft_wallet.py

       - name: Process coverage data
         run: |
@@ -80,7 +80,7 @@ jobs:
       - name: Test wallet-rl_wallet code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" -p no:monitor tests/wallet/rl_wallet/test_rl_rpc.py tests/wallet/rl_wallet/test_rl_wallet.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" tests/wallet/rl_wallet/test_rl_rpc.py tests/wallet/rl_wallet/test_rl_wallet.py

       - name: Process coverage data
         run: |
@@ -94,7 +94,7 @@ jobs:
       - name: Test wallet-rpc code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" -p no:monitor tests/wallet/rpc/test_wallet_rpc.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" tests/wallet/rpc/test_wallet_rpc.py

       - name: Process coverage data
         run: |
@@ -94,7 +94,7 @@ jobs:
       - name: Test wallet-simple_sync code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" -p no:monitor tests/wallet/simple_sync/test_simple_sync_protocol.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" tests/wallet/simple_sync/test_simple_sync_protocol.py

       - name: Process coverage data
         run: |
@@ -94,7 +94,7 @@ jobs:
       - name: Test wallet-sync code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" -p no:monitor tests/wallet/sync/test_wallet_sync.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" tests/wallet/sync/test_wallet_sync.py

       - name: Process coverage data
         run: |
@@ -94,7 +94,7 @@ jobs:
       - name: Test wallet code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/wallet/test_bech32m.py tests/wallet/test_chialisp.py tests/wallet/test_puzzle_store.py tests/wallet/test_singleton.py tests/wallet/test_singleton_lifecycle.py tests/wallet/test_singleton_lifecycle_fast.py tests/wallet/test_taproot.py tests/wallet/test_wallet.py tests/wallet/test_wallet_blockchain.py tests/wallet/test_wallet_interested_store.py tests/wallet/test_wallet_key_val_store.py tests/wallet/test_wallet_store.py tests/wallet/test_wallet_user_store.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/wallet/test_bech32m.py tests/wallet/test_chialisp.py tests/wallet/test_coin_selection.py tests/wallet/test_puzzle_store.py tests/wallet/test_singleton.py tests/wallet/test_singleton_lifecycle.py tests/wallet/test_singleton_lifecycle_fast.py tests/wallet/test_taproot.py tests/wallet/test_wallet.py tests/wallet/test_wallet_blockchain.py tests/wallet/test_wallet_interested_store.py tests/wallet/test_wallet_key_val_store.py tests/wallet/test_wallet_retry.py tests/wallet/test_wallet_store.py tests/wallet/test_wallet_user_store.py

       - name: Process coverage data
         run: |
@@ -94,7 +94,7 @@ jobs:
       - name: Test weight_proof code with pytest
         run: |
           . ./activate
-          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/weight_proof/test_weight_proof.py
+          venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/weight_proof/test_weight_proof.py

       - name: Process coverage data
         run: |
@@ -194,18 +194,17 @@ extend_skip=
     tests/util/db_connection.py
     tests/util/keyring.py
     tests/util/key_tool.py
     tests/util/misc.py
     tests/util/network_protocol_data.py
     tests/util/network.py
     tests/util/test_lock_queue.py
     tests/util/test_misc.py
     tests/util/test_network_protocol_files.py
     tests/util/test_network.py
     tests/util/test_struct_stream.py
     tests/wallet/cat_wallet/test_cat_lifecycle.py
     tests/wallet/cat_wallet/test_cat_wallet.py
     tests/wallet/cat_wallet/test_offer_lifecycle.py
     tests/wallet/did_wallet/test_did.py
     tests/wallet/did_wallet/test_did_rpc.py
     tests/wallet/rpc/test_wallet_rpc.py
     tests/wallet/simple_sync/test_simple_sync_protocol.py
     tests/wallet/test_singleton_lifecycle_fast.py
     tests/wallet/test_singleton_lifecycle.py
CHANGELOG.md
@@ -8,9 +8,30 @@ for setuptools_scm/PEP 440 reasons.

 ## [Unreleased]

+## 1.3.5 Chia blockchain 2022-5-11
+
+### Added
+
+- Added Support for Python 3.10
+- Performance improvements in harvesters during plot refresh. Large farmers likely no longer need to specify a very high plot refresh interval in config.yaml
+- Added CLI only `.rpm` and `.deb` packages to official release channels
+- Fixed an issue where some coins would be missing after a full sync
+- Enabled paginated plot loading and improved plot state reporting
+- Updated the farming GUI tab to fix several bugs
+- Fix infinite loop with timelord closing
+- Simplified install.sh ubuntu version tracking
+- Fixed memory leak on the farm page
+- Fixed list of plot files "in progress"
+- Various farmer rpc improvements
+- Improvements to the harvester `get_plots` RPC
+
+### Known Issues
+
+There is a known issue where harvesters will not reconnect to the farmer automatically unless you restart the harvester. This bug was introduced in 1.3.4 and we plan to patch it in a coming release.
+
 ## 1.3.4 Chia blockchain 2022-4-19

-## What's Changed
+### What's Changed

 ### Added
@@ -72,6 +72,7 @@ async def main(db_path: Path):
     )

     peak = blockchain.get_peak()
+    assert peak is not None
     timing = 0.0
     for i in range(REPETITIONS):
         block = BlockInfo(
benchmarks/jsonify.py (new file)
@@ -0,0 +1,23 @@
+import random
+from time import perf_counter
+
+from tests.util.test_full_block_utils import get_full_blocks
+
+random.seed(123456789)
+
+
+def main() -> None:
+    total_time = 0.0
+    counter = 0
+    for block in get_full_blocks():
+        start = perf_counter()
+        block.to_json_dict()
+        end = perf_counter()
+        total_time += end - start
+        counter += 1
+
+    print(f"total time: {total_time:0.2f}s ({counter} iterations)")
+
+
+if __name__ == "__main__":
+    main()
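The new benchmark takes no arguments; it times FullBlock.to_json_dict() over blocks yielded by the test helper above. A hedged sketch of invoking it programmatically, assuming a chia-blockchain checkout where the benchmarks and tests packages are importable:

    # equivalent to "python -m benchmarks.jsonify" from the repository root
    import runpy

    runpy.run_module("benchmarks.jsonify", run_name="__main__")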
@@ -1 +1 @@
-Subproject commit 725d77abbae77463fea725396e169f4148b810b6
+Subproject commit 32245d869c70eedbb2a5fe77618b107dda70a647
@@ -215,6 +215,7 @@ def derive_cmd(ctx: click.Context, fingerprint: Optional[int], filename: Optiona
     "non-observer derivation should be used at that index. Example HD path: m/12381n/8444n/2/",
     type=str,
 )
+@click.option("--prefix", "-x", help="Address prefix (xch for mainnet, txch for testnet)", default=None, type=str)
 @click.pass_context
 def search_cmd(
     ctx: click.Context,
@@ -224,6 +225,7 @@ def search_cmd(
     show_progress: bool,
     search_type: Tuple[str, ...],
     derive_from_hd_path: Optional[str],
+    prefix: Optional[str],
 ):
     import sys
     from .keys_funcs import search_derive, resolve_derivation_master_key
@@ -238,6 +240,7 @@ def search_cmd(
     private_key = resolve_derivation_master_key(filename if filename is not None else fingerprint)

     found: bool = search_derive(
         ctx.obj["root_path"],
         private_key,
         search_terms,
         limit,
@@ -245,6 +248,7 @@ def search_cmd(
         show_progress,
         ("all",) if "all" in search_type else search_type,
         derive_from_hd_path,
+        prefix,
     )

     sys.exit(0 if found else 1)
@@ -245,6 +245,7 @@ def _search_derived(
     search_public_key: bool,
     search_private_key: bool,
     search_address: bool,
+    prefix: str,
 ) -> List[str]:  # Return a subset of search_terms that were found
     """
     Performs a shallow search of keys derived from the current sk for items matching
@@ -291,7 +292,7 @@ def _search_derived(
         if search_address:
             # Generate a wallet address using the standard p2_delegated_puzzle_or_hidden_puzzle puzzle
             # TODO: consider generating addresses using other puzzles
-            address = encode_puzzle_hash(create_puzzlehash_for_pk(child_pk), "xch")
+            address = encode_puzzle_hash(create_puzzlehash_for_pk(child_pk), prefix)

         for term in remaining_search_terms:
             found_item: Any = None
@@ -344,6 +345,7 @@ def _search_derived(


 def search_derive(
     root_path: Path,
     private_key: Optional[PrivateKey],
     search_terms: Tuple[str, ...],
     limit: int,
@@ -351,6 +353,7 @@ def search_derive(
     show_progress: bool,
     search_types: Tuple[str, ...],
     derive_from_hd_path: Optional[str],
+    prefix: Optional[str],
 ) -> bool:
     """
     Searches for items derived from the provided private key, or if not specified,
@@ -366,6 +369,11 @@ def search_derive(
     search_public_key = "public_key" in search_types
     search_private_key = "private_key" in search_types

+    if prefix is None:
+        config: Dict = load_config(root_path, "config.yaml")
+        selected: str = config["selected_network"]
+        prefix = config["network_overrides"]["config"][selected]["address_prefix"]
+
     if "all" in search_types:
         search_address = True
         search_public_key = True
@@ -402,6 +410,7 @@ def search_derive(
                 search_public_key,
                 search_private_key,
                 search_address,
+                prefix,
             )

             # Update remaining_search_terms
@@ -447,6 +456,7 @@ def search_derive(
                 search_public_key,
                 search_private_key,
                 search_address,
+                prefix,
             )

             # Update remaining_search_terms
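The reason the prefix has to be threaded all the way down: the same puzzle hash encodes to a different bech32m address on each network. A small sketch, assuming an environment where chia.util.bech32m is importable; the all-zero hash is a dummy value:

    from chia.types.blockchain_format.sized_bytes import bytes32
    from chia.util.bech32m import encode_puzzle_hash

    puzzle_hash = bytes32(b"\x00" * 32)
    print(encode_puzzle_hash(puzzle_hash, "xch"))   # mainnet-style address
    print(encode_puzzle_hash(puzzle_hash, "txch"))  # testnet-style address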
@@ -1,4 +1,5 @@
 import asyncio
+from concurrent.futures import ThreadPoolExecutor
 import os
 import subprocess
 import sys
@@ -37,7 +38,8 @@ async def create_start_daemon_connection(root_path: Path) -> Optional[DaemonProx
     if await connection.is_keyring_locked():
         passphrase = Keychain.get_cached_master_passphrase()
         if not Keychain.master_passphrase_is_valid(passphrase):
-            passphrase = get_current_passphrase()
+            with ThreadPoolExecutor(max_workers=1, thread_name_prefix="get_current_passphrase") as executor:
+                passphrase = await asyncio.get_running_loop().run_in_executor(executor, get_current_passphrase)

         if passphrase:
             print("Unlocking daemon keyring")
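A self-contained sketch of the pattern adopted above: a blocking passphrase prompt is moved onto a dedicated worker thread so the event loop stays responsive. blocking_prompt() is a hypothetical stand-in for get_current_passphrase():

    import asyncio
    from concurrent.futures import ThreadPoolExecutor


    def blocking_prompt() -> str:
        return input("Passphrase: ")  # blocks its worker thread, not the event loop


    async def main() -> None:
        with ThreadPoolExecutor(max_workers=1, thread_name_prefix="get_current_passphrase") as executor:
            passphrase = await asyncio.get_running_loop().run_in_executor(executor, blocking_prompt)
        print(f"read {len(passphrase)} characters")


    if __name__ == "__main__":
        asyncio.run(main())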
@@ -573,7 +573,7 @@ def nft_add_uri_cmd(
 @click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
 @click.option("-i", "--id", help="Id of the NFT wallet to use", type=int, required=True)
 @click.option("-ni", "--nft-coin-id", help="Id of the NFT coin to transfer", type=str, required=True)
-@click.option("-aa", "--artist-address", help="Target artist's wallet address", type=str, required=True)
+@click.option("-ta", "--target-address", help="Target recipient wallet address", type=str, required=True)
 @click.option(
     "-m",
     "--fee",
@@ -588,7 +588,7 @@ def nft_transfer_cmd(
     fingerprint: int,
     id: int,
     nft_coin_id: str,
-    artist_address: str,
+    target_address: str,
     fee: str,
 ) -> None:
     import asyncio
@@ -597,7 +597,7 @@ def nft_transfer_cmd(
     extra_params = {
         "wallet_id": id,
         "nft_coin_id": nft_coin_id,
-        "artist_address": artist_address,
+        "target_address": target_address,
         "fee": fee,
     }
     asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, transfer_nft))
@@ -681,11 +681,31 @@ async def create_nft_wallet(args: Dict, wallet_client: WalletRpcClient, fingerpr
 async def mint_nft(args: Dict, wallet_client: WalletRpcClient, fingerprint: int) -> None:
     try:
         wallet_id = args["wallet_id"]
-        artist_address = args["artist_address"]
+        royalty_address = args.get("royalty_address", None)
+        target_address = args.get("target_address", None)
         hash = args["hash"]
         uris = args["uris"]
+        meta_hash = args.get("meta_hash", None)
+        meta_uris = args.get("meta_uris", None)
+        license_hash = args.get("license_hash", None)
+        license_uris = args.get("license_uris", None)
+        series_total = args.get("series_total", None)
+        series_number = args.get("series_number", None)
         fee = args["fee"]
-        response = await wallet_client.mint_nft(wallet_id, artist_address, hash, uris, fee)
+        response = await wallet_client.mint_nft(
+            wallet_id,
+            royalty_address,
+            target_address,
+            hash,
+            uris,
+            meta_hash,
+            meta_uris,
+            license_hash,
+            license_uris,
+            series_total,
+            series_number,
+            fee,
+        )
         spend_bundle = response["spend_bundle"]
         print(f"NFT minted Successfully with spend bundle: {spend_bundle}")
     except Exception as e:
@@ -698,7 +718,8 @@ async def add_uri_to_nft(args: Dict, wallet_client: WalletRpcClient, fingerprint
         nft_coin_id = args["nft_coin_id"]
         uri = args["uri"]
         fee = args["fee"]
-        response = await wallet_client.add_uri_to_nft(wallet_id, nft_coin_id, uri, fee)
+        key = args.get("meta_uri", "u")
+        response = await wallet_client.add_uri_to_nft(wallet_id, nft_coin_id, key, uri, fee)
         spend_bundle = response["spend_bundle"]
         print(f"URI added successfully with spend bundle: {spend_bundle}")
     except Exception as e:
@@ -709,9 +730,9 @@ async def transfer_nft(args: Dict, wallet_client: WalletRpcClient, fingerprint:
     try:
         wallet_id = args["wallet_id"]
         nft_coin_id = args["nft_coin_id"]
-        artist_address = args["artist_address"]
+        target_address = args["target_address"]
         fee = args["fee"]
-        response = await wallet_client.transfer_nft(wallet_id, nft_coin_id, artist_address, fee)
+        response = await wallet_client.transfer_nft(wallet_id, nft_coin_id, target_address, fee)
         spend_bundle = response["spend_bundle"]
         print(f"NFT transferred successfully with spend bundle: {spend_bundle}")
     except Exception as e:
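The mint path now reads eight optional NFT fields with args.get(key, None) so that missing CLI options default cleanly. A standalone sketch of that pattern; the key names mirror the diff, the sample dict is made up:

    from typing import Any, Dict, Optional

    OPTIONAL_KEYS = [
        "royalty_address", "target_address", "meta_hash", "meta_uris",
        "license_hash", "license_uris", "series_total", "series_number",
    ]


    def optional_fields(args: Dict[str, Any]) -> Dict[str, Optional[Any]]:
        # missing keys default to None, matching args.get(key, None) in the CLI code
        return {key: args.get(key, None) for key in OPTIONAL_KEYS}


    print(optional_fields({"royalty_address": "xch1...", "series_total": 10}))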
@@ -44,7 +44,7 @@ async def validate_block_body(
     fork_point_with_peak: Optional[uint32],
     get_block_generator: Callable[[BlockInfo], Awaitable[Optional[BlockGenerator]]],
     *,
-    validate_signature=True,
+    validate_signature: bool = True,
 ) -> Tuple[Optional[Err], Optional[NPCResult]]:
     """
     This assumes the header block has been completely validated.
@@ -147,12 +147,16 @@ async def validate_block_body(
         return Err.INVALID_REWARD_COINS, None

     removals: List[bytes32] = []
-    coinbase_additions: List[Coin] = list(expected_reward_coins)
-    additions: List[Coin] = []
+
+    # we store coins paired with their names in order to avoid computing the
+    # coin name multiple times, we store it next to the coin while validating
+    # the block
+    coinbase_additions: List[Tuple[Coin, bytes32]] = [(c, c.name()) for c in expected_reward_coins]
+    additions: List[Tuple[Coin, bytes32]] = []
     removals_puzzle_dic: Dict[bytes32, bytes32] = {}
     cost: uint64 = uint64(0)

-    # In header validation we check that timestamp is not more that 5 minutes into the future
+    # In header validation we check that timestamp is not more than 5 minutes into the future
     # 6. No transactions before INITIAL_TRANSACTION_FREEZE timestamp
     # (this test has been removed)
@@ -210,7 +214,8 @@ async def validate_block_body(
             removals.append(spend.coin_id)
             removals_puzzle_dic[spend.coin_id] = spend.puzzle_hash
             for puzzle_hash, amount, _ in spend.create_coin:
-                additions.append(Coin(spend.coin_id, puzzle_hash, uint64(amount)))
+                c = Coin(spend.coin_id, puzzle_hash, uint64(amount))
+                additions.append((c, c.name()))
     else:
         assert npc_result is None
@@ -222,8 +227,8 @@ async def validate_block_body(
     # 10. Check additions for max coin amount
     # Be careful to check for 64 bit overflows in other languages. This is the max 64 bit unsigned integer
     # We will not even reach here because Coins do type checking (uint64)
-    for coin in additions + coinbase_additions:
-        additions_dic[coin.name()] = coin
+    for coin, coin_name in additions + coinbase_additions:
+        additions_dic[coin_name] = coin
         if coin.amount < 0:
             return Err.COIN_AMOUNT_NEGATIVE, None
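A self-contained sketch of the (coin, name) pairing these hunks introduce. The Coin class below is a simplified stand-in; in chia the coin name is a sha256-based hash, and recomputing it on every lookup is the cost being avoided:

    from dataclasses import dataclass
    from hashlib import sha256
    from typing import Dict, List, Tuple


    @dataclass(frozen=True)
    class Coin:
        parent_id: bytes
        puzzle_hash: bytes
        amount: int

        def name(self) -> bytes:
            return sha256(self.parent_id + self.puzzle_hash + self.amount.to_bytes(8, "big")).digest()


    coins = [Coin(b"\x00" * 32, b"\x01" * 32, 1), Coin(b"\x00" * 32, b"\x02" * 32, 2)]
    # compute each name exactly once, then carry it alongside its coin
    additions: List[Tuple[Coin, bytes]] = [(c, c.name()) for c in coins]
    additions_dic: Dict[bytes, Coin] = {coin_name: coin for coin, coin_name in additions}
    assert len(additions_dic) == 2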
@@ -243,7 +248,7 @@ async def validate_block_body(
     # 12. The additions and removals must result in the correct filter
     byte_array_tx: List[bytearray] = []

-    for coin in additions + coinbase_additions:
+    for coin, _ in additions + coinbase_additions:
         byte_array_tx.append(bytearray(coin.puzzle_hash))
     for coin_name in removals:
         byte_array_tx.append(bytearray(coin_name))
@@ -256,7 +261,7 @@ async def validate_block_body(
         return Err.INVALID_TRANSACTIONS_FILTER_HASH, None

     # 13. Check for duplicate outputs in additions
-    addition_counter = collections.Counter(_.name() for _ in additions + coinbase_additions)
+    addition_counter = collections.Counter(coin_name for _, coin_name in additions + coinbase_additions)
     for k, v in addition_counter.items():
         if v > 1:
             return Err.DUPLICATE_OUTPUT, None
@@ -322,14 +327,16 @@ async def validate_block_body(
                     assert c_name not in removals_since_fork
                     removals_since_fork.add(c_name)
                 for c in additions_in_curr:
-                    assert c.name() not in additions_since_fork
+                    coin_name = c.name()
+                    assert coin_name not in additions_since_fork
                     assert curr.foliage_transaction_block is not None
-                    additions_since_fork[c.name()] = (c, curr.height, curr.foliage_transaction_block.timestamp)
+                    additions_since_fork[coin_name] = (c, curr.height, curr.foliage_transaction_block.timestamp)

                 for coinbase_coin in curr.get_included_reward_coins():
-                    assert coinbase_coin.name() not in additions_since_fork
+                    coin_name = coinbase_coin.name()
+                    assert coin_name not in additions_since_fork
                     assert curr.foliage_transaction_block is not None
-                    additions_since_fork[coinbase_coin.name()] = (
+                    additions_since_fork[coin_name] = (
                         coinbase_coin,
                         curr.height,
                         curr.foliage_transaction_block.timestamp,
@@ -409,7 +416,7 @@ async def validate_block_body(
             removed += unspent.coin.amount

     added = 0
-    for coin in additions:
+    for coin, _ in additions:
         added += coin.amount

     # 16. Check that the total coin amount for added is <= removed
@@ -5,6 +5,7 @@ from typing import Callable, Dict, List, Optional, Tuple

 import blspy
 from blspy import G1Element, G2Element
+from chia_rs import compute_merkle_set_root
 from chiabip158 import PyBIP158

 from chia.consensus.block_record import BlockRecord
@@ -28,7 +29,6 @@ from chia.types.generator_types import BlockGenerator
 from chia.types.unfinished_block import UnfinishedBlock
 from chia.util.hash import std_hash
 from chia.util.ints import uint8, uint32, uint64, uint128
-from chia.util.merkle_set import MerkleSet
 from chia.util.prev_transaction_block import get_prev_transaction_block
 from chia.util.recursive_replace import recursive_replace
@@ -198,12 +198,7 @@ def create_foliage(
     bip158: PyBIP158 = PyBIP158(byte_array_tx)
     encoded = bytes(bip158.GetEncoded())

-    removal_merkle_set = MerkleSet()
-    addition_merkle_set = MerkleSet()
-
-    # Create removal Merkle set
-    for coin_name in tx_removals:
-        removal_merkle_set.add_already_hashed(coin_name)
+    additions_merkle_items: List[bytes32] = []

     # Create addition Merkle set
     puzzlehash_coin_map: Dict[bytes32, List[bytes32]] = {}
@@ -216,11 +211,11 @@ def create_foliage(

     # Addition Merkle set contains puzzlehash and hash of all coins with that puzzlehash
     for puzzle, coin_ids in puzzlehash_coin_map.items():
-        addition_merkle_set.add_already_hashed(puzzle)
-        addition_merkle_set.add_already_hashed(hash_coin_ids(coin_ids))
+        additions_merkle_items.append(puzzle)
+        additions_merkle_items.append(hash_coin_ids(coin_ids))

-    additions_root = addition_merkle_set.get_root()
-    removals_root = removal_merkle_set.get_root()
+    additions_root = bytes32(compute_merkle_set_root(additions_merkle_items))
+    removals_root = bytes32(compute_merkle_set_root(tx_removals))

     generator_hash = bytes32([0] * 32)
     if block_generator is not None:
@@ -303,7 +298,7 @@ def create_unfinished_block(
     additions: Optional[List[Coin]] = None,
     removals: Optional[List[Coin]] = None,
     prev_block: Optional[BlockRecord] = None,
-    finished_sub_slots_input: List[EndOfSubSlotBundle] = None,
+    finished_sub_slots_input: Optional[List[EndOfSubSlotBundle]] = None,
 ) -> UnfinishedBlock:
     """
     Creates a new unfinished block using all the information available at the signage point. This will have to be
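This is one instance of an implicit-Optional cleanup applied throughout the commit: a parameter that defaults to None must be annotated Optional[...] for strict type checkers, and normalized inside the function. A minimal sketch with illustrative names:

    from typing import List, Optional


    def create(finished_sub_slots_input: Optional[List[int]] = None) -> List[int]:
        if finished_sub_slots_input is None:
            finished_sub_slots_input = []
        return finished_sub_slots_input


    assert create() == []
    assert create([1, 2]) == [1, 2]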
@@ -518,8 +513,9 @@ def unfinished_block_to_full_block(
             new_generator,
             new_generator_ref_list,
         )
-    return recursive_replace(
+    ret = recursive_replace(
         ret,
         "foliage.reward_block_hash",
         ret.reward_chain_block.get_hash(),
     )
+    return ret
@@ -44,7 +44,7 @@ def validate_unfinished_header_block(
     expected_sub_slot_iters: uint64,
     skip_overflow_last_ss_validation: bool = False,
     skip_vdf_is_valid: bool = False,
-    check_sub_epoch_summary=True,
+    check_sub_epoch_summary: bool = True,
 ) -> Tuple[Optional[uint64], Optional[ValidationError]]:
     """
     Validates an unfinished header block. This is a block without the infusion VDFs (unfinished)
@@ -831,7 +831,7 @@ def validate_finished_header_block(
     check_filter: bool,
     expected_difficulty: uint64,
     expected_sub_slot_iters: uint64,
-    check_sub_epoch_summary=True,
+    check_sub_epoch_summary: bool = True,
 ) -> Tuple[Optional[uint64], Optional[ValidationError]]:
     """
     Fully validates the header of a block. A header block is the same as a full block, but
@@ -85,5 +85,5 @@ class BlockRecord(Streamable):
             self.required_iters,
         )

-    def sp_total_iters(self, constants: ConsensusConstants):
-        return self.sp_sub_slot_total_iters(constants) + self.sp_iters(constants)
+    def sp_total_iters(self, constants: ConsensusConstants) -> uint128:
+        return uint128(self.sp_sub_slot_total_iters(constants) + self.sp_iters(constants))
@@ -1,44 +1,40 @@
-from typing import Dict, List, Optional
+from typing import Dict, List, Optional, Tuple
+
+from chia_rs import compute_merkle_set_root

 from chia.types.blockchain_format.coin import Coin, hash_coin_ids
 from chia.types.blockchain_format.sized_bytes import bytes32
 from chia.util.errors import Err
-from chia.util.merkle_set import MerkleSet


 def validate_block_merkle_roots(
     block_additions_root: bytes32,
     block_removals_root: bytes32,
-    tx_additions: List[Coin] = None,
-    tx_removals: List[bytes32] = None,
+    tx_additions: Optional[List[Tuple[Coin, bytes32]]] = None,
+    tx_removals: Optional[List[bytes32]] = None,
 ) -> Optional[Err]:
     if tx_removals is None:
         tx_removals = []
     if tx_additions is None:
         tx_additions = []
-    removal_merkle_set = MerkleSet()
-    addition_merkle_set = MerkleSet()
-
-    # Create removal Merkle set
-    for coin_name in tx_removals:
-        removal_merkle_set.add_already_hashed(coin_name)

     # Create addition Merkle set
     puzzlehash_coins_map: Dict[bytes32, List[bytes32]] = {}

-    for coin in tx_additions:
+    for coin, coin_name in tx_additions:
         if coin.puzzle_hash in puzzlehash_coins_map:
-            puzzlehash_coins_map[coin.puzzle_hash].append(coin.name())
+            puzzlehash_coins_map[coin.puzzle_hash].append(coin_name)
         else:
-            puzzlehash_coins_map[coin.puzzle_hash] = [coin.name()]
+            puzzlehash_coins_map[coin.puzzle_hash] = [coin_name]

     # Addition Merkle set contains puzzlehash and hash of all coins with that puzzlehash
+    additions_merkle_items: List[bytes32] = []
     for puzzle, coin_ids in puzzlehash_coins_map.items():
-        addition_merkle_set.add_already_hashed(puzzle)
-        addition_merkle_set.add_already_hashed(hash_coin_ids(coin_ids))
+        additions_merkle_items.append(puzzle)
+        additions_merkle_items.append(hash_coin_ids(coin_ids))

-    additions_root = addition_merkle_set.get_root()
-    removals_root = removal_merkle_set.get_root()
+    additions_root = bytes32(compute_merkle_set_root(additions_merkle_items))
+    removals_root = bytes32(compute_merkle_set_root(tx_removals))

     if block_additions_root != additions_root:
         return Err.BAD_ADDITION_ROOT
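A hedged usage sketch of the Rust-side helper that replaces the incremental MerkleSet in both files: it takes the flat list of 32-byte items and returns the root in one call. Assumes chia_rs is installed; the item values below are dummies standing in for a puzzle hash and a hashed coin-id list:

    from hashlib import sha256

    from chia_rs import compute_merkle_set_root

    puzzle_hash = sha256(b"puzzle").digest()
    hashed_coin_ids = sha256(b"coin ids").digest()  # stand-in for hash_coin_ids(...)

    root = compute_merkle_set_root([puzzle_hash, hashed_coin_ids])
    print(root.hex())  # callers above wrap this result in bytes32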
@@ -8,7 +8,7 @@ from concurrent.futures.process import ProcessPoolExecutor
 from enum import Enum
 from multiprocessing.context import BaseContext
 from pathlib import Path
-from typing import Dict, List, Optional, Set, Tuple
+from typing import Any, Dict, List, Optional, Set, Tuple

 from chia.consensus.block_body_validation import validate_block_body
 from chia.consensus.block_header_validation import validate_unfinished_header_block
@@ -78,7 +78,7 @@ class StateChangeSummary:

 class Blockchain(BlockchainInterface):
     constants: ConsensusConstants
-    constants_json: Dict
+    constants_json: Dict[str, Any]

     # peak of the blockchain
     _peak_height: Optional[uint32]
@@ -117,7 +117,7 @@ class Blockchain(BlockchainInterface):
         multiprocessing_context: Optional[BaseContext] = None,
         *,
         single_threaded: bool = False,
-    ):
+    ) -> "Blockchain":
         """
         Initializes a blockchain with the BlockRecords from disk, assuming they have all been
         validated. Uses the genesis block given in override_constants, or as a fallback,
@@ -144,18 +144,18 @@ class Blockchain(BlockchainInterface):
         self.constants = consensus_constants
         self.coin_store = coin_store
         self.block_store = block_store
-        self.constants_json = recurse_jsonify(dataclasses.asdict(self.constants))
+        self.constants_json = recurse_jsonify(self.constants)
         self._shut_down = False
         await self._load_chain_from_store(blockchain_dir)
         self._seen_compact_proofs = set()
         self.hint_store = hint_store
         return self

-    def shut_down(self):
+    def shut_down(self) -> None:
         self._shut_down = True
         self.pool.shutdown(wait=True)

-    async def _load_chain_from_store(self, blockchain_dir):
+    async def _load_chain_from_store(self, blockchain_dir: Path) -> None:
         """
         Initializes the state of the Blockchain class from the database.
         """
@@ -174,7 +174,7 @@ class Blockchain(BlockchainInterface):
             assert peak is not None
             self._peak_height = self.block_record(peak).height
             assert self.__height_map.contains_height(self._peak_height)
-            assert not self.__height_map.contains_height(self._peak_height + 1)
+            assert not self.__height_map.contains_height(uint32(self._peak_height + 1))

     def get_peak(self) -> Optional[BlockRecord]:
         """
@@ -539,7 +539,7 @@ class Blockchain(BlockchainInterface):
         return list(reversed(recent_rc))

     async def validate_unfinished_block(
-        self, block: UnfinishedBlock, npc_result: Optional[NPCResult], skip_overflow_ss_validation=True
+        self, block: UnfinishedBlock, npc_result: Optional[NPCResult], skip_overflow_ss_validation: bool = True
     ) -> PreValidationResult:
         if (
             not self.contains_block(block.prev_header_hash)
@@ -632,9 +632,9 @@ class Blockchain(BlockchainInterface):
         npc_result_bytes = await task
         if npc_result_bytes is None:
             raise ConsensusError(Err.UNKNOWN)
-        ret = NPCResult.from_bytes(npc_result_bytes)
+        ret: NPCResult = NPCResult.from_bytes(npc_result_bytes)
         if ret.error is not None:
-            raise ConsensusError(ret.error)
+            raise ConsensusError(Err(ret.error))
         return ret

     def contains_block(self, header_hash: bytes32) -> bool:
@@ -671,7 +671,7 @@ class Blockchain(BlockchainInterface):
     def get_peak_height(self) -> Optional[uint32]:
         return self._peak_height

-    async def warmup(self, fork_point: uint32):
+    async def warmup(self, fork_point: uint32) -> None:
         """
         Loads blocks into the cache. The blocks loaded include all blocks from
         fork point - BLOCKS_CACHE_SIZE up to and including the fork_point.
@@ -688,7 +688,7 @@ class Blockchain(BlockchainInterface):
         for block_record in block_records.values():
             self.add_block_record(block_record)

-    def clean_block_record(self, height: int):
+    def clean_block_record(self, height: int) -> None:
         """
         Clears all block records in the cache which have block_record < height.
         Args:
@@ -707,7 +707,7 @@ class Blockchain(BlockchainInterface):
             height = height - 1
             blocks_to_remove = self.__heights_in_cache.get(uint32(height), None)

-    def clean_block_records(self):
+    def clean_block_records(self) -> None:
         """
         Cleans the cache so that we only maintain relevant blocks. This removes
         block records that have height < peak - BLOCKS_CACHE_SIZE.
@@ -771,7 +771,7 @@ class Blockchain(BlockchainInterface):
             return None
         return header_dict[header_hash]

-    async def get_block_records_at(self, heights: List[uint32], batch_size=900) -> List[BlockRecord]:
+    async def get_block_records_at(self, heights: List[uint32], batch_size: int = 900) -> List[BlockRecord]:
         """
         gets block records by height (only blocks that are part of the chain)
         """
@@ -798,12 +798,12 @@ class Blockchain(BlockchainInterface):
             return self.__block_records[header_hash]
         return await self.block_store.get_block_record(header_hash)

-    def remove_block_record(self, header_hash: bytes32):
+    def remove_block_record(self, header_hash: bytes32) -> None:
         sbr = self.block_record(header_hash)
         del self.__block_records[header_hash]
         self.__heights_in_cache[sbr.height].remove(header_hash)

-    def add_block_record(self, block_record: BlockRecord):
+    def add_block_record(self, block_record: BlockRecord) -> None:
         """
         Adds a block record to the cache.
         """
@@ -815,8 +815,8 @@ class Blockchain(BlockchainInterface):

     async def persist_sub_epoch_challenge_segments(
         self, ses_block_hash: bytes32, segments: List[SubEpochChallengeSegment]
-    ):
-        return await self.block_store.persist_sub_epoch_challenge_segments(ses_block_hash, segments)
+    ) -> None:
+        await self.block_store.persist_sub_epoch_challenge_segments(ses_block_hash, segments)

     async def get_sub_epoch_challenge_segments(
         self,
@@ -841,7 +841,7 @@ class Blockchain(BlockchainInterface):
             return False

     async def get_block_generator(
-        self, block: BlockInfo, additional_blocks: Dict[bytes32, FullBlock] = None
+        self, block: BlockInfo, additional_blocks: Optional[Dict[bytes32, FullBlock]] = None
     ) -> Optional[BlockGenerator]:
         if additional_blocks is None:
             additional_blocks = {}
@@ -34,16 +34,16 @@ class BlockchainInterface:
     def contains_block(self, header_hash: bytes32) -> bool:
         pass

-    def remove_block_record(self, header_hash: bytes32):
+    def remove_block_record(self, header_hash: bytes32) -> None:
         pass

-    def add_block_record(self, block_record: BlockRecord):
+    def add_block_record(self, block_record: BlockRecord) -> None:
         pass

     def contains_height(self, height: uint32) -> bool:
         pass

-    async def warmup(self, fork_point: uint32):
+    async def warmup(self, fork_point: uint32) -> None:
         pass

     async def get_block_record_from_db(self, header_hash: bytes32) -> Optional[BlockRecord]:
@@ -72,7 +72,7 @@ class BlockchainInterface:

     async def persist_sub_epoch_challenge_segments(
         self, sub_epoch_summary_height: bytes32, segments: List[SubEpochChallengeSegment]
-    ):
+    ) -> None:
         pass

     async def get_sub_epoch_challenge_segments(
@@ -18,11 +18,11 @@ def farmer_parent_id(block_height: uint32, genesis_challenge: bytes32) -> bytes3
     return bytes32(genesis_challenge[16:] + block_height.to_bytes(16, "big"))


-def create_pool_coin(block_height: uint32, puzzle_hash: bytes32, reward: uint64, genesis_challenge: bytes32):
+def create_pool_coin(block_height: uint32, puzzle_hash: bytes32, reward: uint64, genesis_challenge: bytes32) -> Coin:
     parent_id = pool_parent_id(block_height, genesis_challenge)
     return Coin(parent_id, puzzle_hash, reward)


-def create_farmer_coin(block_height: uint32, puzzle_hash: bytes32, reward: uint64, genesis_challenge: bytes32):
+def create_farmer_coin(block_height: uint32, puzzle_hash: bytes32, reward: uint64, genesis_challenge: bytes32) -> Coin:
     parent_id = farmer_parent_id(block_height, genesis_challenge)
     return Coin(parent_id, puzzle_hash, reward)
@@ -1,5 +1,6 @@
 import dataclasses
+import logging
 from typing import Any

 from chia.types.blockchain_format.sized_bytes import bytes32
 from chia.util.byte_types import hexstr_to_bytes
@@ -61,10 +62,10 @@ class ConsensusConstants:
     POOL_SUB_SLOT_ITERS: uint64
     SOFT_FORK_HEIGHT: uint32

-    def replace(self, **changes) -> "ConsensusConstants":
+    def replace(self, **changes: object) -> "ConsensusConstants":
         return dataclasses.replace(self, **changes)

-    def replace_str_to_bytes(self, **changes) -> "ConsensusConstants":
+    def replace_str_to_bytes(self, **changes: Any) -> "ConsensusConstants":
         """
         Overrides str (hex) values with bytes.
         """
@@ -199,7 +199,7 @@ def _get_next_sub_slot_iters(
     block_at_height_included_ses: bool,
     new_slot: bool,
     signage_point_total_iters: uint128,
-    skip_epoch_check=False,
+    skip_epoch_check: bool = False,
 ) -> uint64:
     """
     Returns the slot iterations required for the next block after the one at height, where new_slot is true
@@ -278,7 +278,7 @@ def _get_next_difficulty(
     block_at_height_included_ses: bool,
     new_slot: bool,
     signage_point_total_iters: uint128,
-    skip_epoch_check=False,
+    skip_epoch_check: bool = False,
 ) -> uint64:
     """
     Returns the difficulty of the next block that extends onto block.
@@ -58,7 +58,7 @@ def get_block_challenge(
     genesis_block: bool,
     overflow: bool,
     skip_overflow_last_ss_validation: bool,
-):
+) -> bytes32:
     if len(header_block.finished_sub_slots) > 0:
         if overflow:
             # New sub-slot with overflow block
@@ -3,7 +3,7 @@ import logging
 import traceback
 from concurrent.futures import Executor
 from dataclasses import dataclass
-from typing import Awaitable, Callable, Dict, List, Optional, Sequence, Tuple
+from typing import Any, Awaitable, Callable, Dict, List, Optional, Sequence, Tuple

 from blspy import AugSchemeMPL, G1Element

@@ -45,7 +45,7 @@ class PreValidationResult(Streamable):


 def batch_pre_validate_blocks(
-    constants_dict: Dict,
+    constants_dict: Dict[str, Any],
     blocks_pickled: Dict[bytes, bytes],
     full_blocks_pickled: Optional[List[bytes]],
     header_blocks_pickled: Optional[List[bytes]],
@@ -122,7 +122,8 @@ def batch_pre_validate_blocks(
                 if npc_result is not None and block.transactions_info is not None:
                     assert npc_result.conds
                     pairs_pks, pairs_msgs = pkm_pairs(npc_result.conds, constants.AGG_SIG_ME_ADDITIONAL_DATA)
-                    pks_objects: List[G1Element] = [G1Element.from_bytes(pk) for pk in pairs_pks]
+                    # Using AugSchemeMPL.aggregate_verify, so it's safe to use from_bytes_unchecked
+                    pks_objects: List[G1Element] = [G1Element.from_bytes_unchecked(pk) for pk in pairs_pks]
                     if not AugSchemeMPL.aggregate_verify(
                         pks_objects, pairs_msgs, block.transactions_info.aggregated_signature
                     ):
@@ -163,13 +164,13 @@ def batch_pre_validate_blocks(


 async def pre_validate_blocks_multiprocessing(
     constants: ConsensusConstants,
-    constants_json: Dict,
+    constants_json: Dict[str, Any],
     block_records: BlockchainInterface,
     blocks: Sequence[FullBlock],
     pool: Executor,
     check_filter: bool,
     npc_results: Dict[uint32, NPCResult],
-    get_block_generator: Callable[[BlockInfo, Optional[Dict[bytes32, FullBlock]]], Awaitable[Optional[BlockGenerator]]],
+    get_block_generator: Callable[[BlockInfo, Dict[bytes32, FullBlock]], Awaitable[Optional[BlockGenerator]]],
     batch_size: int,
     wp_summaries: Optional[List[SubEpochSummary]] = None,
     *,
@@ -14,4 +14,4 @@ def _expected_plot_size(k: int) -> uint64:
     is necessary to store the entries in the plot.
     """

-    return ((2 * k) + 1) * (2 ** (k - 1))
+    return uint64(((2 * k) + 1) * (2 ** (k - 1)))
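As a quick sanity check on the hunk above, the plot-size formula can be evaluated directly. This is a standalone sketch, not part of the diff; the real helper now wraps the result in uint64, while this plain-int version is only for illustration:

def expected_plot_size(k: int) -> int:
    # Same arithmetic as _expected_plot_size above.
    return ((2 * k) + 1) * (2 ** (k - 1))

# For the common plot size k=32:
# (2 * 32 + 1) * 2 ** 31 = 65 * 2147483648 = 139586437120 bytes (about 130 GiB)
print(expected_plot_size(32))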
@@ -1,4 +1,4 @@
-from typing import List, Optional
+from typing import List, Optional, Tuple

 from chia.consensus.block_record import BlockRecord
 from chia.consensus.blockchain_interface import BlockchainInterface
@@ -17,7 +17,7 @@ def get_signage_point_vdf_info(
     blocks: BlockchainInterface,
     sp_total_iters: uint128,
     sp_iters: uint64,
-):
+) -> Tuple[bytes32, bytes32, ClassgroupElement, ClassgroupElement, uint64, uint64]:
     """
     Returns the following information, for the VDF of the signage point at sp_total_iters.
     cc and rc challenge hash
@@ -136,7 +136,7 @@ class BlockHeightMap:
         if ses is not None:
             self.__sub_epoch_summaries[height] = bytes(ses)

-    async def maybe_flush(self):
+    async def maybe_flush(self) -> None:
         if self.__dirty < 1000:
             return

@@ -8,7 +8,6 @@ from chia.types.blockchain_format.sized_bytes import bytes32
 from chia.types.coin_record import CoinRecord
 from chia.util.db_wrapper import DBWrapper2
 from chia.util.ints import uint32, uint64
-from chia.util.lru_cache import LRUCache
 from chia.util.chunks import chunks
 import time
 import logging
@@ -21,18 +20,14 @@ MAX_SQLITE_PARAMETERS = 900
 class CoinStore:
     """
     This object handles CoinRecords in DB.
-    A cache is maintained for quicker access to recent coins.
     """

-    coin_record_cache: LRUCache
-    cache_size: uint32
     db_wrapper: DBWrapper2

     @classmethod
-    async def create(cls, db_wrapper: DBWrapper2, cache_size: uint32 = uint32(60000)):
+    async def create(cls, db_wrapper: DBWrapper2):
         self = cls()

-        self.cache_size = cache_size
         self.db_wrapper = db_wrapper

         async with self.db_wrapper.write_db() as conn:
@@ -81,7 +76,6 @@ class CoinStore:

             await conn.execute("CREATE INDEX IF NOT EXISTS coin_parent_index on coin_record(coin_parent)")

-        self.coin_record_cache = LRUCache(cache_size)
         return self

     async def num_unspent(self) -> int:
@@ -161,10 +155,6 @@ class CoinStore:

     # Checks DB and DiffStores for CoinRecord with coin_name and returns it
     async def get_coin_record(self, coin_name: bytes32) -> Optional[CoinRecord]:
-        cached = self.coin_record_cache.get(coin_name)
-        if cached is not None:
-            return cached
-
         async with self.db_wrapper.read_db() as conn:
             async with conn.execute(
                 "SELECT confirmed_index, spent_index, coinbase, puzzle_hash, "
@@ -174,9 +164,7 @@ class CoinStore:
                 row = await cursor.fetchone()
                 if row is not None:
                     coin = self.row_to_coin(row)
-                    record = CoinRecord(coin, row[0], row[1], row[2], row[6])
-                    self.coin_record_cache.put(record.coin.name(), record)
-                    return record
+                    return CoinRecord(coin, row[0], row[1], row[2], row[6])
         return None

     async def get_coin_records(self, names: List[bytes32]) -> List[CoinRecord]:
@@ -184,14 +172,6 @@ class CoinStore:
             return []

         coins: List[CoinRecord] = []
-        new_names: List[bytes32] = []
-        for n in names:
-            cached = self.coin_record_cache.get(n)
-            if cached is not None:
-                coins.append(cached)
-            else:
-                new_names.append(n)
-        names = new_names

         if len(names) == 0:
             return coins
@@ -218,7 +198,6 @@ class CoinStore:
                 coin = self.row_to_coin(row)
                 record = CoinRecord(coin, row[0], row[1], row[2], row[6])
                 coins.append(record)
-                self.coin_record_cache.put(record.coin.name(), record)

         return coins

@@ -452,23 +431,6 @@ class CoinStore:
         Note that block_index can be negative, in which case everything is rolled back
         Returns the list of coin records that have been modified
         """
-        # Update memory cache
-        delete_queue: List[bytes32] = []
-        for coin_name, coin_record in list(self.coin_record_cache.cache.items()):
-            if int(coin_record.spent_block_index) > block_index:
-                new_record = CoinRecord(
-                    coin_record.coin,
-                    coin_record.confirmed_block_index,
-                    uint32(0),
-                    coin_record.coinbase,
-                    coin_record.timestamp,
-                )
-                self.coin_record_cache.put(coin_record.coin.name(), new_record)
-            if int(coin_record.confirmed_block_index) > block_index:
-                delete_queue.append(coin_name)
-
-        for coin_name in delete_queue:
-            self.coin_record_cache.remove(coin_name)

         coin_changes: Dict[bytes32, CoinRecord] = {}
         # Add coins that are confirmed in the reverted blocks to the list of updated coins.
@@ -506,13 +468,12 @@ class CoinStore:
         )
         return list(coin_changes.values())

-    # Store CoinRecord in DB and ram cache
+    # Store CoinRecord in DB
     async def _add_coin_records(self, records: List[CoinRecord]) -> None:

         if self.db_wrapper.db_version == 2:
             values2 = []
             for record in records:
-                self.coin_record_cache.put(record.coin.name(), record)
                 values2.append(
                     (
                         record.coin.name(),
@@ -534,7 +495,6 @@ class CoinStore:
         else:
             values = []
             for record in records:
-                self.coin_record_cache.put(record.coin.name(), record)
                 values.append(
                     (
                         record.coin.name().hex(),
@@ -559,20 +519,10 @@ class CoinStore:
     async def _set_spent(self, coin_names: List[bytes32], index: uint32):

         assert len(coin_names) == 0 or index > 0
-        # if this coin is in the cache, mark it as spent in there
-        updates = []
-        for coin_name in coin_names:
-            r = self.coin_record_cache.get(coin_name)
-            if r is not None:
-                if r.spent_block_index != uint32(0):
-                    raise ValueError(f"Coin already spent in cache: {coin_name}")
-
-                self.coin_record_cache.put(
-                    r.name, CoinRecord(r.coin, r.confirmed_block_index, index, r.coinbase, r.timestamp)
-                )
-            updates.append((index, self.maybe_to_hex(coin_name)))
-
-        assert len(updates) == len(coin_names)
         async with self.db_wrapper.write_db() as conn:
             if self.db_wrapper.db_version == 2:
                 ret: Cursor = await conn.executemany(
@@ -1573,7 +1573,7 @@ def _get_curr_diff_ssi(constants: ConsensusConstants, idx, summaries):


 def vars_to_bytes(constants: ConsensusConstants, summaries: List[SubEpochSummary], weight_proof: WeightProof):
-    constants_dict = recurse_jsonify(dataclasses.asdict(constants))
+    constants_dict = recurse_jsonify(constants)
     wp_recent_chain_bytes = bytes(RecentChainData(weight_proof.recent_chain_data))
     wp_segment_bytes = bytes(SubEpochSegments(weight_proof.sub_epoch_segments))
     summary_bytes = []
@@ -104,8 +104,9 @@ class Harvester:
     def on_disconnect(self, connection: ws.WSChiaConnection):
         self.log.info(f"peer disconnected {connection.get_peer_logging()}")
         self.state_changed("close_connection")
-        self.plot_manager.stop_refreshing()
         self.plot_sync_sender.stop()
+        asyncio.run_coroutine_threadsafe(self.plot_sync_sender.await_closed(), asyncio.get_running_loop())
+        self.plot_manager.stop_refreshing()

     def get_plots(self) -> Tuple[List[Dict], List[str], List[str]]:
         self.log.debug(f"get_plots prover items: {self.plot_manager.plot_count()}")
@@ -1,6 +1,5 @@
 import asyncio
 import logging
-import threading
 import time
 import traceback
 from dataclasses import dataclass
@@ -91,7 +90,6 @@ class Sender:
     _last_sync_id: uint64
     _stop_requested = False
     _task: Optional[asyncio.Task]  # type: ignore[type-arg]  # Asks for Task parameter which doesn't work
-    _lock: threading.Lock
     _response: Optional[ExpectedResponse]

     def __init__(self, plot_manager: PlotManager) -> None:
@@ -103,7 +101,6 @@ class Sender:
         self._last_sync_id = uint64(0)
         self._stop_requested = False
         self._task = None
-        self._lock = threading.Lock()
         self._response = None

     def __str__(self) -> str:
@@ -145,8 +142,6 @@ class Sender:
         self._sync_id = uint64(0)
         self._next_message_id = uint64(0)
         self._messages.clear()
-        if self._lock.locked():
-            self._lock.release()
         if self._task is not None:
             # TODO, Add typing in PlotManager
             self.sync_start(self._plot_manager.plot_count(), True)  # type:ignore[no-untyped-call]
@@ -256,7 +251,11 @@

     def sync_start(self, count: float, initial: bool) -> None:
         log.debug(f"sync_start {self}: count {count}, initial {initial}")
-        self._lock.acquire()
+        while self.sync_active():
+            if self._stop_requested:
+                log.debug("sync_start aborted")
+                return
+            time.sleep(0.1)
         sync_id = int(time.time())
         # Make sure we have unique sync-id's even if we restart refreshing within a second (i.e. in tests)
         if sync_id == self._last_sync_id:
@@ -294,13 +293,13 @@
         log.debug(f"_finalize_sync {self}")
         assert self._sync_id != 0
         self._last_sync_id = self._sync_id
-        self._sync_id = uint64(0)
         self._next_message_id = uint64(0)
         self._messages.clear()
-        self._lock.release()
+        # Do this at the end since `_sync_id` is used as sync active indicator.
+        self._sync_id = uint64(0)

     def sync_active(self) -> bool:
-        return self._lock.locked() and self._sync_id != 0
+        return self._sync_id != 0

     def connected(self) -> bool:
         return self._connection is not None
@@ -70,9 +70,11 @@ def get_plot_directories(root_path: Path, config: Dict = None) -> List[str]:
 def get_plot_filenames(root_path: Path) -> Dict[Path, List[Path]]:
     # Returns a map from directory to a list of all plots in the directory
     all_files: Dict[Path, List[Path]] = {}
-    for directory_name in get_plot_directories(root_path):
+    config = load_config(root_path, "config.yaml")
+    recursive_scan: bool = config["harvester"].get("recursive_plot_scan", False)
+    for directory_name in get_plot_directories(root_path, config):
         directory = Path(directory_name).resolve()
-        all_files[directory] = get_filenames(directory)
+        all_files[directory] = get_filenames(directory, recursive_scan)
     return all_files


@@ -109,7 +111,7 @@ def remove_plot(path: Path):
     path.unlink()


-def get_filenames(directory: Path) -> List[Path]:
+def get_filenames(directory: Path, recursive: bool) -> List[Path]:
     try:
         if not directory.exists():
             log.warning(f"Directory: {directory} does not exist.")
@@ -119,13 +121,9 @@ def get_filenames(directory: Path) -> List[Path]:
             return []
     all_files: List[Path] = []
     try:
-        for child in directory.iterdir():
-            if not child.is_dir():
-                # If it is a file ending in .plot, add it - work around MacOS ._ files
-                if child.suffix == ".plot" and not child.name.startswith("._"):
-                    all_files.append(child)
-            else:
-                log.debug(f"Not checking subdirectory {child}, subdirectories not added by default")
+        glob_function = directory.rglob if recursive else directory.glob
+        all_files = [child for child in glob_function("*.plot") if child.is_file() and not child.name.startswith("._")]
+        log.debug(f"get_filenames: {len(all_files)} files found in {directory}, recursive: {recursive}")
     except Exception as e:
         log.warning(f"Error reading directory {directory} {e}")
     return all_files
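The rewritten get_filenames swaps the manual iterdir() walk for glob/rglob. A minimal standalone sketch of the same logic (find_plots is an illustrative name, not the real API):

from pathlib import Path
from typing import List

def find_plots(directory: Path, recursive: bool) -> List[Path]:
    # rglob("*.plot") descends into subdirectories; glob("*.plot") stays at the
    # top level. Both skip macOS "._" metadata files, as in the hunk above.
    glob_function = directory.rglob if recursive else directory.glob
    return [p for p in glob_function("*.plot") if p.is_file() and not p.name.startswith("._")]

print(find_plots(Path("."), recursive=True))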
@@ -470,7 +470,11 @@ class FullNodeRpcApi:

-        newer_block = await self.service.block_store.get_block_record(newer_block_bytes)
-        if newer_block is None:
-            raise ValueError(f"Newer block {newer_block_hex} not found")
+        # It's possible that the peak block has not yet been committed to the DB, so as a fallback, check memory
+        try:
+            newer_block = self.service.blockchain.block_record(newer_block_bytes)
+        except KeyError:
+            raise ValueError(f"Newer block {newer_block_hex} not found")
         older_block = await self.service.block_store.get_block_record(older_block_bytes)
         if older_block is None:
             raise ValueError(f"Older block {older_block_hex} not found")
@@ -3,7 +3,7 @@ import json
 import logging
 import traceback
 from pathlib import Path
-from typing import Any, Callable, Dict, List, Optional
+from typing import Any, Callable, Coroutine, Dict, List, Optional, Tuple

 from aiohttp import ClientConnectorError, ClientSession, ClientWebSocketResponse, WSMsgType, web

@@ -308,12 +308,16 @@ async def start_rpc_server(
     root_path: Path,
     net_config,
     connect_to_daemon=True,
-):
+    max_request_body_size=None,
+    name: str = "rpc_server",
+) -> Tuple[Callable[[], Coroutine[Any, Any, None]], uint16]:
     """
     Starts an HTTP server with the following RPC methods, to be used by local clients to
     query the node.
     """
-    app = web.Application()
+    if max_request_body_size is None:
+        max_request_body_size = 1024 ** 2
+    app = web.Application(client_max_size=max_request_body_size)
     rpc_server = RpcServer(rpc_api, rpc_api.service_name, stop_cb, root_path, net_config)
     rpc_server.rpc_api.service._set_state_changed_callback(rpc_server.state_changed)
     app.add_routes([web.post(route, wrap_http_handler(func)) for (route, func) in rpc_server.get_routes().items()])
@@ -321,8 +325,10 @@ async def start_rpc_server(
     daemon_connection = asyncio.create_task(rpc_server.connect_to_daemon(self_hostname, daemon_port))
     runner = web.AppRunner(app, access_log=None)
     await runner.setup()
+
     site = web.TCPSite(runner, self_hostname, int(rpc_port), ssl_context=rpc_server.ssl_context)
     await site.start()
+    rpc_port = runner.addresses[0][1]

     async def cleanup():
         await rpc_server.stop()
@@ -330,4 +336,4 @@ async def start_rpc_server(
     if connect_to_daemon:
         await daemon_connection

-    return cleanup
+    return cleanup, rpc_port
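start_rpc_server now reads the bound port back from the runner, so callers can pass port 0 and learn the OS-assigned port from the return value. A hedged standalone sketch of that aiohttp pattern (not part of the diff):

import asyncio
from aiohttp import web

async def main() -> None:
    app = web.Application(client_max_size=1024 ** 2)
    runner = web.AppRunner(app, access_log=None)
    await runner.setup()
    # Port 0 asks the OS for any free port; the actual port is then read back
    # from runner.addresses, mirroring `rpc_port = runner.addresses[0][1]` above.
    site = web.TCPSite(runner, "127.0.0.1", 0)
    await site.start()
    print("listening on port", runner.addresses[0][1])
    await runner.cleanup()

asyncio.run(main())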
@@ -11,12 +11,14 @@ from chia.consensus.block_rewards import calculate_base_farmer_reward
 from chia.pools.pool_wallet import PoolWallet
 from chia.pools.pool_wallet_info import FARMING_TO_POOL, PoolState, PoolWalletInfo, create_pool_state
 from chia.protocols.protocol_message_types import ProtocolMessageTypes
+from chia.protocols.wallet_protocol import CoinState
 from chia.server.outbound_message import NodeType, make_msg
 from chia.simulator.simulator_protocol import FarmNewBlockProtocol
 from chia.types.announcement import Announcement
 from chia.types.blockchain_format.coin import Coin
 from chia.types.blockchain_format.program import Program
 from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.types.coin_spend import CoinSpend
 from chia.types.spend_bundle import SpendBundle
 from chia.util.bech32m import decode_puzzle_hash, encode_puzzle_hash
 from chia.util.byte_types import hexstr_to_bytes
@@ -35,8 +37,10 @@ from chia.wallet.derive_keys import (
     match_address_to_sk,
 )
 from chia.wallet.did_wallet.did_wallet import DIDWallet
-from chia.wallet.nft_wallet.nft_puzzles import get_nft_info_from_puzzle
+from chia.wallet.nft_wallet import nft_puzzles
+from chia.wallet.nft_wallet.nft_info import NFTInfo
 from chia.wallet.nft_wallet.nft_wallet import NFTWallet
+from chia.wallet.nft_wallet.uncurry_nft import UncurriedNFT
 from chia.wallet.outer_puzzles import AssetType
 from chia.wallet.puzzle_drivers import PuzzleInfo
 from chia.wallet.rl_wallet.rl_wallet import RLWallet
@@ -131,6 +135,7 @@ class WalletRpcApi:
             # NFT Wallet
             "/nft_mint_nft": self.nft_mint_nft,
             "/nft_get_nfts": self.nft_get_nfts,
+            "/nft_get_info": self.nft_get_info,
             "/nft_transfer_nft": self.nft_transfer_nft,
             "/nft_add_uri": self.nft_add_uri,
             # RL wallet
@@ -1321,21 +1326,42 @@ class WalletRpcApi:
         wallet_id = uint32(request["wallet_id"])
         assert self.service.wallet_state_manager
         nft_wallet: NFTWallet = self.service.wallet_state_manager.wallets[wallet_id]
-        address = request.get("artist_address")
-        if isinstance(address, str):
-            puzzle_hash = decode_puzzle_hash(address)
-        elif address is None:
-            puzzle_hash = await nft_wallet.standard_wallet.get_new_puzzlehash()
+        royalty_address = request.get("royalty_address")
+        if isinstance(royalty_address, str):
+            royalty_puzhash = decode_puzzle_hash(royalty_address)
+        elif royalty_address is None:
+            royalty_puzhash = await nft_wallet.standard_wallet.get_new_puzzlehash()
         else:
-            puzzle_hash = address
+            royalty_puzhash = royalty_address
+        target_address = request.get("target_address")
+        if isinstance(target_address, str):
+            target_puzhash = decode_puzzle_hash(target_address)
+        elif target_address is None:
+            target_puzhash = await nft_wallet.standard_wallet.get_new_puzzlehash()
+        else:
+            target_puzhash = target_address
         if "uris" not in request:
             return {"success": False, "error": "Data URIs is required"}
         if not isinstance(request["uris"], list):
             return {"success": False, "error": "Data URIs must be a list"}
+        if not isinstance(request.get("meta_uris", []), list):
+            return {"success": False, "error": "Metadata URIs must be a list"}
+        if not isinstance(request.get("license_uris", []), list):
+            return {"success": False, "error": "License URIs must be a list"}
         metadata = Program.to(
             [
                 ("u", request["uris"]),
                 ("h", hexstr_to_bytes(request["hash"])),
+                ("mu", request.get("meta_uris", [])),
+                ("mh", hexstr_to_bytes(request.get("meta_hash", "00"))),
+                ("lu", request.get("license_uris", [])),
+                ("lh", hexstr_to_bytes(request.get("license_hash", "00"))),
+                ("sn", uint64(request.get("series_number", 1))),
+                ("st", uint64(request.get("series_total", 1))),
             ]
         )
         fee = uint64(request.get("fee", 0))
-        spend_bundle = await nft_wallet.generate_new_nft(metadata, puzzle_hash, fee=fee)
+        spend_bundle = await nft_wallet.generate_new_nft(metadata, royalty_puzhash, target_puzhash, fee=fee)
         return {"wallet_id": wallet_id, "success": True, "spend_bundle": spend_bundle}

     async def nft_get_nfts(self, request) -> Dict:
@@ -1345,7 +1371,7 @@ class WalletRpcApi:
         nfts = nft_wallet.get_current_nfts()
         nft_info_list = []
         for nft in nfts:
-            nft_info_list.append(get_nft_info_from_puzzle(nft.full_puzzle, nft.coin))
+            nft_info_list.append(nft_puzzles.get_nft_info_from_puzzle(nft.full_puzzle, nft.coin))
         return {"wallet_id": wallet_id, "success": True, "nft_list": nft_info_list}

     async def nft_transfer_nft(self, request):
@@ -1366,15 +1392,88 @@ class WalletRpcApi:
             log.exception(f"Failed to transfer NFT: {e}")
             return {"success": False, "error": str(e)}

+    async def nft_get_info(self, request: Dict) -> Optional[Dict]:
+        assert self.service.wallet_state_manager is not None
+        if "coin_id" not in request:
+            return {"success": False, "error": "Coin ID is required."}
+        coin_id = bytes32.from_hexstr(request["coin_id"])
+        peer = self.service.wallet_state_manager.wallet_node.get_full_node_peer()
+        if peer is None:
+            return {"success": False, "error": "Cannot find a full node peer."}
+        # Get coin state
+        coin_state_list: List[CoinState] = await self.service.wallet_state_manager.wallet_node.get_coin_state(
+            [coin_id], peer=peer
+        )
+        if coin_state_list is None or len(coin_state_list) < 1:
+            return {"success": False, "error": f"Coin record 0x{coin_id.hex()} not found"}
+        coin_state: CoinState = coin_state_list[0]
+        if request.get("latest", True):
+            # Find the unspent coin
+            while coin_state.spent_height is not None:
+                coin_state_list = await self.service.wallet_state_manager.wallet_node.fetch_children(
+                    peer, coin_state.coin.name()
+                )
+                odd_coin = 0
+                for coin in coin_state_list:
+                    if coin.coin.amount % 2 == 1:
+                        odd_coin += 1
+                    if odd_coin > 1:
+                        return {"success": False, "error": "This is not a singleton, multiple children coins found."}
+                coin_state = coin_state_list[0]
+        # Get parent coin
+        parent_coin_state_list: List[CoinState] = await self.service.wallet_state_manager.wallet_node.get_coin_state(
+            [coin_state.coin.parent_coin_info], peer=peer
+        )
+        if parent_coin_state_list is None or len(parent_coin_state_list) < 1:
+            return {
+                "success": False,
+                "error": f"Parent coin record 0x{coin_state.coin.parent_coin_info.hex()} not found",
+            }
+        parent_coin_state: CoinState = parent_coin_state_list[0]
+        coin_spend: CoinSpend = await self.service.wallet_state_manager.wallet_node.fetch_puzzle_solution(
+            peer, parent_coin_state.spent_height, parent_coin_state.coin
+        )
+        # convert to NFTInfo
+        try:
+            # Check if the metadata is updated
+            inner_solution: Program = Program.from_bytes(bytes(coin_spend.solution)).rest().rest().first().first()
+            full_puzzle: Program = Program.from_bytes(bytes(coin_spend.puzzle_reveal))
+            update_condition = None
+            for condition in inner_solution.rest().first().rest().as_iter():
+                if condition.first().as_int() == -24:
+                    update_condition = condition
+                    break
+            if update_condition is not None:
+                uncurried_nft: UncurriedNFT = UncurriedNFT.uncurry(full_puzzle)
+                metadata: Program = uncurried_nft.metadata
+                metadata = nft_puzzles.update_metadata(metadata, update_condition)
+                # Note: This is not the actual unspent NFT full puzzle.
+                # There is no way to rebuild the full puzzle in a different wallet.
+                # But it shouldn't have impact on generating the NFTInfo, since inner_puzzle is not used there.
+                full_puzzle = nft_puzzles.create_full_puzzle(
+                    uncurried_nft.singleton_launcher_id,
+                    metadata,
+                    uncurried_nft.metadata_updater_hash,
+                    uncurried_nft.inner_puzzle,
+                )
+            nft_info: NFTInfo = nft_puzzles.get_nft_info_from_puzzle(full_puzzle, coin_state.coin)
+        except Exception as e:
+            return {"success": False, "error": f"The coin is not a NFT. {e}"}
+        else:
+            return {"success": True, "nft_info": nft_info}
+
     async def nft_add_uri(self, request) -> Dict:
         assert self.service.wallet_state_manager is not None
         wallet_id = uint32(request["wallet_id"])
-        uri = request["uri"]
+        # Note metadata updater can only add one uri for one field per spend.
+        # If you want to add multiple uris for one field, you need to spend multiple times.
         nft_wallet: NFTWallet = self.service.wallet_state_manager.wallets[wallet_id]
         try:
+            uri = request["uri"]
+            key = request["key"]
             nft_coin_info = nft_wallet.get_nft_coin_by_id(bytes32.from_hexstr(request["nft_coin_id"]))
             fee = uint64(request.get("fee", 0))
-            spend_bundle = await nft_wallet.update_metadata(nft_coin_info, uri, fee=fee)
+            spend_bundle = await nft_wallet.update_metadata(nft_coin_info, key, uri, fee=fee)
             return {"wallet_id": wallet_id, "success": True, "spend_bundle": spend_bundle}
         except Exception as e:
             log.exception(f"Failed to update NFT metadata: {e}")
@@ -52,16 +52,16 @@ class WalletRpcClient(RpcClient):
     async def generate_mnemonic(self) -> List[str]:
         return (await self.fetch("generate_mnemonic", {}))["mnemonic"]

-    async def add_key(self, mnemonic: List[str], request_type: str = "new_wallet") -> None:
+    async def add_key(self, mnemonic: List[str], request_type: str = "new_wallet") -> Dict[str, Any]:
         return await self.fetch("add_key", {"mnemonic": mnemonic, "type": request_type})

-    async def delete_key(self, fingerprint: int) -> None:
+    async def delete_key(self, fingerprint: int) -> Dict[str, Any]:
         return await self.fetch("delete_key", {"fingerprint": fingerprint})

-    async def check_delete_key(self, fingerprint: int, max_ph_to_search: int = 100) -> None:
+    async def check_delete_key(self, fingerprint: int, max_ph_to_search: int = 100) -> Dict[str, Any]:
         return await self.fetch("check_delete_key", {"fingerprint": fingerprint, "max_ph_to_search": max_ph_to_search})

-    async def delete_all_keys(self) -> None:
+    async def delete_all_keys(self) -> Dict[str, Any]:
         return await self.fetch("delete_all_keys", {})

     # Wallet Node APIs
@@ -77,7 +77,7 @@ class WalletRpcClient(RpcClient):
     async def push_tx(self, spend_bundle):
         return await self.fetch("push_tx", {"spend_bundle": bytes(spend_bundle).hex()})

-    async def farm_block(self, address: str) -> None:
+    async def farm_block(self, address: str) -> Dict[str, Any]:
         return await self.fetch("farm_block", {"address": address})

     # Wallet Management APIs
@@ -105,6 +105,7 @@ class WalletRpcClient(RpcClient):
         end: int = None,
         sort_key: SortKey = None,
         reverse: bool = False,
+        to_address: Optional[str] = None,
     ) -> List[TransactionRecord]:
         request: Dict[str, Any] = {"wallet_id": wallet_id}

@@ -116,6 +117,9 @@ class WalletRpcClient(RpcClient):
             request["sort_key"] = sort_key.name
         request["reverse"] = reverse

+        if to_address is not None:
+            request["to_address"] = to_address
+
         res = await self.fetch(
             "get_transactions",
             request,
@@ -429,11 +433,11 @@ class WalletRpcClient(RpcClient):
         }
         return await self.fetch("create_new_wallet", request)

-    async def get_cat_asset_id(self, wallet_id: str) -> bytes:
+    async def get_cat_asset_id(self, wallet_id: str) -> bytes32:
         request: Dict[str, Any] = {
             "wallet_id": wallet_id,
         }
-        return bytes.fromhex((await self.fetch("cat_get_asset_id", request))["asset_id"])
+        return bytes32.from_hexstr((await self.fetch("cat_get_asset_id", request))["asset_id"])

     async def get_stray_cats(self) -> Dict:
         response = await self.fetch("get_stray_cats", {})
@@ -570,27 +574,59 @@ class WalletRpcClient(RpcClient):
         response = await self.fetch("create_new_wallet", request)
         return response

-    async def mint_nft(self, wallet_id, artist_address, hash, uris, fee):
+    async def mint_nft(
+        self,
+        wallet_id,
+        royalty_address,
+        target_address,
+        hash,
+        uris,
+        meta_hash="00",
+        meta_uris=[],
+        license_hash="00",
+        license_uris=[],
+        series_total=1,
+        series_number=1,
+        fee=0,
+    ):
         request: Dict[str, Any] = {
             "wallet_id": wallet_id,
-            "artist_address": artist_address,
+            "royalty_address": royalty_address,
+            "target_address": target_address,
             "hash": hash,
             "uris": uris,
+            "meta_hash": meta_hash,
+            "meta_uris": meta_uris,
+            "license_hash": license_hash,
+            "license_uris": license_uris,
+            "series_number": series_number,
+            "series_total": series_total,
             "fee": fee,
         }
         response = await self.fetch("nft_mint_nft", request)
         return response

-    async def add_uri_to_nft(self, wallet_id, nft_coin_id, uri, fee):
-        request: Dict[str, Any] = {"wallet_id": wallet_id, "nft_coin_id": nft_coin_id, "uri": uri, "fee": fee}
-        response = await self.fetch("nft_add_uri", request)
-        return response
-
-    async def transfer_nft(self, wallet_id, nft_coin_id, artist_address, fee):
+    async def add_uri_to_nft(self, wallet_id, nft_coin_id, key, uri, fee):
         request: Dict[str, Any] = {
             "wallet_id": wallet_id,
             "nft_coin_id": nft_coin_id,
-            "target_address": artist_address,
+            "uri": uri,
+            "key": key,
             "fee": fee,
         }
+        response = await self.fetch("nft_add_uri", request)
+        return response
+
+    async def get_nft_info(self, coin_id: bytes32, latest: bool = True):
+        request: Dict[str, Any] = {"coin_id": coin_id.hex(), "latest": latest}
+        response = await self.fetch("nft_get_info", request)
+        return response
+
+    async def transfer_nft(self, wallet_id, nft_coin_id, target_address, fee):
+        request: Dict[str, Any] = {
+            "wallet_id": wallet_id,
+            "nft_coin_id": nft_coin_id,
+            "target_address": target_address,
+            "fee": fee,
+        }
         response = await self.fetch("nft_transfer_nft", request)
@@ -263,13 +263,17 @@ class ChiaServer:
             self.chia_ca_crt_path, self.chia_ca_key_path, self.p2p_crt_path, self.p2p_key_path, log=self.log
         )

+        # If self._port is set to zero, the socket will bind to a new available port. Therefore, we have to obtain
+        # this port from the socket itself and update self._port.
         self.site = web.TCPSite(
             self.runner,
-            port=self._port,
+            host="0.0.0.0",
+            port=int(self._port),
             shutdown_timeout=3,
             ssl_context=ssl_context,
         )
         await self.site.start()
+        self._port = self.runner.addresses[0][1]
         self.log.info(f"Started listening on port: {self._port}")

     async def incoming_connection(self, request):
@@ -304,14 +308,13 @@ class ChiaServer:
             self._outbound_rate_limit_percent,
             close_event,
         )
-        handshake = await connection.perform_handshake(
+        await connection.perform_handshake(
             self._network_id,
             protocol_version,
             self._port,
             self._local_type,
         )
-
-        assert handshake is True
         # Limit inbound connections to config's specifications.
         if not self.accept_inbound_connections(connection.connection_type) and not is_in_network(
             connection.peer_host, self.exempt_peer_networks
@@ -458,13 +461,12 @@ class ChiaServer:
             self._outbound_rate_limit_percent,
             session=session,
         )
-        handshake = await connection.perform_handshake(
+        await connection.perform_handshake(
             self._network_id,
             protocol_version,
             self._port,
             self._local_type,
         )
-        assert handshake is True
         await self.connection_added(connection, on_connect)
         # the session has been adopted by the connection, don't close it at
         # the end of the function
@@ -784,6 +786,9 @@ class ChiaServer:
             return None
         return peer

+    def get_port(self) -> uint16:
+        return uint16(self._port)
+
     def accept_inbound_connections(self, node_type: NodeType) -> bool:
         if not self._local_type == NodeType.FULL_NODE:
             return True
@@ -54,6 +54,7 @@ class Service:
         connect_to_daemon=True,
         running_new_process=True,
         service_name_prefix="",
+        max_request_body_size: Optional[int] = None,
     ) -> None:
         self.root_path = root_path
         self.config = load_config(root_path, "config.yaml")
@@ -67,6 +68,7 @@ class Service:
         self._rpc_task: Optional[asyncio.Task] = None
         self._rpc_close_task: Optional[asyncio.Task] = None
         self._network_id: str = network_id
+        self.max_request_body_size = max_request_body_size
         self._running_new_process = running_new_process

         # when we start this service as a component of an existing process,
@@ -159,6 +161,7 @@ class Service:
             self.upnp.remap(port)

         await self._server.start_server(self._on_connect_callback)
+        self._advertised_port = self._server.get_port()

         self._reconnect_tasks = [
             start_reconnect_task(self._server, _, self._log, self._auth_connect_peers, self.config.get("prefer_ipv6"))
@@ -179,6 +182,8 @@ class Service:
                     self.root_path,
                     self.config,
                     self._connect_to_daemon,
+                    max_request_body_size=self.max_request_body_size,
+                    name=self._service_name + "_rpc",
                 )
             )

@@ -246,7 +251,7 @@ class Service:

         async def close_rpc_server() -> None:
             if self._rpc_task:
-                await (await self._rpc_task)()
+                await (await self._rpc_task)[0]()

         self._rpc_close_task = asyncio.create_task(close_rpc_server())
@@ -109,7 +109,13 @@ class WSChiaConnection:
         self.version = None
         self.protocol_version = ""

-    async def perform_handshake(self, network_id: str, protocol_version: str, server_port: int, local_type: NodeType):
+    async def perform_handshake(
+        self,
+        network_id: str,
+        protocol_version: str,
+        server_port: int,
+        local_type: NodeType,
+    ) -> None:
         if self.is_outbound:
             outbound_handshake = make_msg(
                 ProtocolMessageTypes.handshake,
@@ -184,7 +190,6 @@ class WSChiaConnection:

         self.outbound_task = asyncio.create_task(self.outbound_handler())
         self.inbound_task = asyncio.create_task(self.inbound_handler())
-        return True

     async def close(self, ban_time: int = 0, ws_close_code: WSCloseCode = WSCloseCode.OK, error: Optional[Err] = None):
         """
@@ -128,7 +128,7 @@ class Timelord:
         self.vdf_server = await asyncio.start_server(
             self._handle_client,
             self.config["vdf_server"]["host"],
-            self.config["vdf_server"]["port"],
+            int(self.config["vdf_server"]["port"]),
         )
         self.last_state: LastState = LastState(self.constants)
         slow_bluebox = self.config.get("slow_bluebox", False)
@@ -149,7 +149,12 @@ class Timelord:
             )
         else:
             self.main_loop = asyncio.create_task(self._manage_discriminant_queue_sanitizer())
-        log.info("Started timelord.")
+        log.info(f"Started timelord, listening on port {self.get_vdf_server_port()}")
+
+    def get_vdf_server_port(self) -> Optional[uint16]:
+        if self.vdf_server is not None:
+            return self.vdf_server.sockets[0].getsockname()[1]
+        return None

     def _close(self):
         self._shut_down = True
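The same port-zero trick applies to the plain asyncio server used by the timelord: get_vdf_server_port() reads the OS-assigned port back from the listening socket. A small standalone sketch (not part of the diff):

import asyncio

async def main() -> None:
    async def handle(reader: asyncio.StreamReader, writer: asyncio.StreamWriter) -> None:
        writer.close()

    server = await asyncio.start_server(handle, "127.0.0.1", 0)
    # getsockname() returns (host, port); index 1 is the assigned port,
    # exactly what the new get_vdf_server_port() reads.
    print("vdf server bound to port", server.sockets[0].getsockname()[1])
    server.close()
    await server.wait_closed()

asyncio.run(main())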
@@ -1,5 +1,6 @@
 import io
-from typing import BinaryIO, Type, TypeVar, TYPE_CHECKING
+from typing import BinaryIO, Iterable, SupportsBytes, Type, TypeVar, Union
+
+from typing_extensions import SupportsIndex

 _T_SizedBytes = TypeVar("_T_SizedBytes", bound="SizedBytes")

@@ -20,29 +21,28 @@ class SizedBytes(bytes):

     _size = 0

-    @staticmethod
-    def __new__(cls: Type[_T_SizedBytes], v) -> _T_SizedBytes:
-        v = bytes(v)
-        if not isinstance(v, bytes) or len(v) != cls._size:
-            raise ValueError("bad %s initializer %s" % (cls.__name__, v))
-        return bytes.__new__(cls, v)
+    # This is just a partial exposure of the underlying int constructor. Liskov...
+    # https://github.com/python/typeshed/blob/f8547a3f3131de90aa47005358eb3394e79cfa13/stdlib/builtins.pyi#L483-L493
+    def __init__(self, v: Union[Iterable[SupportsIndex], SupportsBytes]) -> None:
+        # v is unused here and that is ok since .__new__() seems to have already
+        # processed the parameter when creating the instance of the class. We have no
+        # additional special action to take here beyond verifying that the newly
+        # created instance satisfies the length limitation of the particular subclass.
+        super().__init__()
+        if len(self) != self._size:
+            raise ValueError("bad %s initializer %s" % (type(self).__name__, v))

     @classmethod
     def parse(cls: Type[_T_SizedBytes], f: BinaryIO) -> _T_SizedBytes:
         b = f.read(cls._size)
         assert len(b) == cls._size
         return cls(b)

-    def stream(self, f):
+    def stream(self, f: BinaryIO) -> None:
         f.write(self)

     @classmethod
     def from_bytes(cls: Type[_T_SizedBytes], blob: bytes) -> _T_SizedBytes:
-        # pylint: disable=no-member
-        f = io.BytesIO(blob)
-        result = cls.parse(f)
-        assert f.read() == b""
-        return result
+        return cls(blob)

     @classmethod
     def from_hexstr(cls: Type[_T_SizedBytes], input_str: str) -> _T_SizedBytes:
@@ -50,21 +50,8 @@ class SizedBytes(bytes):
             return cls.fromhex(input_str[2:])
         return cls.fromhex(input_str)

-    def __bytes__(self) -> bytes:
-        f = io.BytesIO()
-        self.stream(f)
-        return bytes(f.getvalue())
-
-    def __str__(self):
+    def __str__(self) -> str:
         return self.hex()

-    def __repr__(self):
+    def __repr__(self) -> str:
         return "<%s: %s>" % (self.__class__.__name__, str(self))
-
-    if TYPE_CHECKING:
-        # TODO: This stub implements a fix already merged into typeshed but not yet
-        # released in a new mypy version. Once released this should be removed.
-        # https://github.com/python/typeshed/pull/6201
-        @classmethod
-        def fromhex(cls: Type[_T_SizedBytes], __s: str) -> _T_SizedBytes:
-            ...
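With validation moved from __new__ into __init__, bytes.__new__ does the construction and the subclass only checks the length afterwards. A minimal sketch of the pattern (Bytes4 is an illustrative name; the real subclasses are bytes32 and friends):

class Bytes4(bytes):
    _size = 4

    def __init__(self, v) -> None:
        # bytes.__new__ has already built the value; only verify the length here,
        # as the new SizedBytes.__init__ does.
        super().__init__()
        if len(self) != self._size:
            raise ValueError("bad %s initializer %s" % (type(self).__name__, v))

print(Bytes4(b"\x00\x01\x02\x03").hex())  # 00010203
# Bytes4(b"\x00") would raise ValueError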
@@ -33,6 +33,8 @@ def get_pairings(cache: LRUCache, pks: List[bytes48], msgs: Sequence[bytes], force_cache: bool) -> List[GTElement]:

         pk_parsed: Optional[G1Element] = pk_bytes_to_g1.get(pks[i])
         if pk_parsed is None:
+            # In this case, we use from_bytes instead of from_bytes_unchecked, because we will not be using
+            # the bls_signatures aggregate_verify method which performs the subgroup checks
             pk_parsed = G1Element.from_bytes(pks[i])
             pk_bytes_to_g1[pks[i]] = pk_parsed

@@ -53,7 +55,8 @@ def aggregate_verify(
 ):
     pairings: List[GTElement] = get_pairings(cache, pks, msgs, force_cache)
     if len(pairings) == 0:
-        pks_objects: List[G1Element] = [G1Element.from_bytes(pk) for pk in pks]
+        # Using AugSchemeMPL.aggregate_verify, so it's safe to use from_bytes_unchecked
+        pks_objects: List[G1Element] = [G1Element.from_bytes_unchecked(pk) for pk in pks]
         return AugSchemeMPL.aggregate_verify(pks_objects, msgs, sig)

     pairings_prod: GTElement = functools.reduce(GTElement.__mul__, pairings)
@@ -1,5 +1,6 @@
 import argparse
 import contextlib
+import copy
 import logging
 import os
 import shutil
@@ -262,3 +263,12 @@ def process_config_start_method(
     log.info(f"Selected multiprocessing start method: {choice}")

     return processed_method
+
+
+def override_config(config: Dict[str, Any], config_overrides: Optional[Dict[str, Any]]):
+    new_config = copy.deepcopy(config)
+    if config_overrides is None:
+        return new_config
+    for k, v in config_overrides.items():
+        add_property(new_config, k, v)
+    return new_config
@@ -195,6 +195,7 @@ harvester:

  # Plots are searched for in the following directories
  plot_directories: []
+  recursive_plot_scan: False  # If True the harvester scans plots recursively in the provided directories.

  ssl:
    private_crt: "config/ssl/harvester/private_harvester.crt"
@@ -542,3 +543,6 @@ wallet:
  # if an unknown CAT belonging to us is seen, a wallet will be automatically created
  # the user accepts the risk/responsibility of verifying the authenticity and origin of unknown CATs
  automatically_add_unknown_cats: False
+
+  # Interval to resend unconfirmed transactions, even if previously accepted into Mempool
+  tx_resend_timeout_secs: 1800
@@ -1,79 +1,63 @@
-from typing import Any, BinaryIO
+from __future__ import annotations

-from chia.util.struct_stream import StructStream
+from chia.util.struct_stream import StructStream, parse_metadata_from_name


+@parse_metadata_from_name
 class int8(StructStream):
-    PACK = "!b"
+    pass


+@parse_metadata_from_name
 class uint8(StructStream):
-    PACK = "!B"
+    pass


+@parse_metadata_from_name
 class int16(StructStream):
-    PACK = "!h"
+    pass


+@parse_metadata_from_name
 class uint16(StructStream):
-    PACK = "!H"
+    pass


+@parse_metadata_from_name
 class int32(StructStream):
-    PACK = "!l"
+    pass


+@parse_metadata_from_name
 class uint32(StructStream):
-    PACK = "!L"
+    pass


+@parse_metadata_from_name
 class int64(StructStream):
-    PACK = "!q"
+    pass


+@parse_metadata_from_name
 class uint64(StructStream):
-    PACK = "!Q"
+    pass


-class uint128(int):
-    def __new__(cls: Any, value: int):
-        value = int(value)
-        if value > (2 ** 128) - 1 or value < 0:
-            raise ValueError(f"Value {value} of does not fit into uint128")
-        return int.__new__(cls, value)
-
-    @classmethod
-    def parse(cls, f: BinaryIO) -> Any:
-        read_bytes = f.read(16)
-        assert len(read_bytes) == 16
-        n = int.from_bytes(read_bytes, "big", signed=False)
-        assert n <= (2 ** 128) - 1 and n >= 0
-        return cls(n)
-
-    def stream(self, f):
-        assert self <= (2 ** 128) - 1 and self >= 0
-        f.write(self.to_bytes(16, "big", signed=False))
+@parse_metadata_from_name
+class uint128(StructStream):
+    pass


-class int512(int):
-    def __new__(cls: Any, value: int):
-        value = int(value)
-        # note that the boundaries for int512 is not what you might expect. We
-        # encode these with one extra byte, but only allow a range of
-        # [-INT512_MAX, INT512_MAX]
-        if value >= (2 ** 512) or value <= -(2 ** 512):
-            raise ValueError(f"Value {value} of does not fit into in512")
-        return int.__new__(cls, value)
+class int512(StructStream):
+    PACK = None

     # Uses 65 bytes to fit in the sign bit
-    @classmethod
-    def parse(cls, f: BinaryIO) -> Any:
-        read_bytes = f.read(65)
-        assert len(read_bytes) == 65
-        n = int.from_bytes(read_bytes, "big", signed=True)
-        assert n < (2 ** 512) and n > -(2 ** 512)
-        return cls(n)
+    SIZE = 65
+    BITS = 512
+    SIGNED = True

-    def stream(self, f):
-        assert self < (2 ** 512) and self > -(2 ** 512)
-        f.write(self.to_bytes(65, "big", signed=True))
+    # note that the boundaries for int512 is not what you might expect. We
+    # encode these with one extra byte, but only allow a range of
+    # [-INT512_MAX, INT512_MAX]
+    MAXIMUM_EXCLUSIVE = 2 ** BITS
+    MINIMUM = -(2 ** BITS) + 1
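Each int class above now carries only its name; the decorator (defined in the struct_stream hunk further down) derives size, signedness, and bounds from it. A standalone sketch of that derivation (bounds_from_name is an illustrative name, not the real API):

from typing import Tuple

def bounds_from_name(name: str) -> Tuple[int, int]:
    # "uint64" -> unsigned, 64 bits; "int16" -> signed, 16 bits.
    signedness, _, bits_text = name.partition("int")
    signed = signedness != "u"
    bits = int(bits_text)
    if signed:
        return -(2 ** (bits - 1)), 2 ** (bits - 1)  # (MINIMUM, MAXIMUM_EXCLUSIVE)
    return 0, 2 ** bits

print(bounds_from_name("uint8"))   # (0, 256)
print(bounds_from_name("int16"))   # (-32768, 32768)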
@@ -1,6 +1,7 @@
+import contextlib
 import os
 import time
-from typing import Callable, Optional, TextIO, TypeVar
+from typing import Iterator, Optional, TextIO, TypeVar

 T = TypeVar("T")

@@ -22,15 +23,13 @@ def create_exclusive_lock(lockfile: str) -> Optional[TextIO]:
     return f


-def with_lock(lock_filename: str, run: Callable[[], T]) -> T:
+@contextlib.contextmanager
+def lock_by_path(lock_filename: str) -> Iterator[None]:
     """
     Ensure that this process and this thread is the only one operating on the
     resource associated with lock_filename systemwide.
-
-    Pass through the result of run after exiting the lock.
     """

-    lock_file = None
     while True:
         lock_file = create_exclusive_lock(lock_filename)
         if lock_file is not None:
@@ -39,7 +38,7 @@ def lock_by_path(lock_filename: str) -> Iterator[None]:
         time.sleep(0.1)

     try:
-        return run()
+        yield
     finally:
         lock_file.close()
         os.remove(lock_filename)
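lock_by_path turns the callback-style with_lock into a context manager, so the caller holds the lock for the extent of a with-block. A hedged usage sketch, assuming the helper lives in chia.util.lock (the diff does not name the module):

from chia.util.lock import lock_by_path

with lock_by_path("/tmp/example.lock"):
    # Exclusive across processes and threads until the block exits;
    # the lock file is removed in the finally clause shown above.
    print("holding the lock")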
@@ -1,4 +1,3 @@
-import dataclasses
 from typing import Any, Dict, Sequence, Union

 from chia.util.streamable import recurse_jsonify
@@ -81,4 +80,5 @@ def get_list_or_len(list_in: Sequence[object], length: bool) -> Union[int, Sequence[object]]:


 def dataclass_to_json_dict(instance: Any) -> Dict[str, Any]:
-    return recurse_jsonify(dataclasses.asdict(instance))
+    ret: Dict[str, Any] = recurse_jsonify(instance)
+    return ret
@@ -3,45 +3,17 @@ from __future__ import annotations
 import dataclasses
 import io
 import pprint
-import sys
 from enum import Enum
-from typing import (
-    Any,
-    BinaryIO,
-    Callable,
-    Dict,
-    Iterator,
-    List,
-    Optional,
-    Tuple,
-    Type,
-    TypeVar,
-    Union,
-    get_type_hints,
-    overload,
-)
+from typing import Any, BinaryIO, Callable, Dict, Iterator, List, Optional, Tuple, Type, TypeVar, Union, get_type_hints

 from blspy import G1Element, G2Element, PrivateKey
-from typing_extensions import Literal
+from typing_extensions import Literal, get_args, get_origin

 from chia.types.blockchain_format.sized_bytes import bytes32
 from chia.util.byte_types import hexstr_to_bytes
 from chia.util.hash import std_hash
 from chia.util.ints import int64, int512, uint32, uint64, uint128

-if sys.version_info < (3, 8):
-
-    def get_args(t: Type[Any]) -> Tuple[Any, ...]:
-        return getattr(t, "__args__", ())
-
-    def get_origin(t: Type[Any]) -> Optional[Type[Any]]:
-        return getattr(t, "__origin__", None)
-
-else:
-
-    from typing import get_args, get_origin
-
-
 pp = pprint.PrettyPrinter(indent=1, width=120, compact=True)

@@ -128,60 +100,48 @@ def dataclass_from_dict(klass: Type[Any], d: Any) -> Any:
         return klass(hexstr_to_bytes(d))
     elif klass.__name__ in unhashable_types:
         # Type is unhashable (bls type), so cast from hex string
-        return klass.from_bytes(hexstr_to_bytes(d))
+        if hasattr(klass, "from_bytes_unchecked"):
+            from_bytes_method: Callable[[bytes], Any] = klass.from_bytes_unchecked
+        else:
+            from_bytes_method = klass.from_bytes
+        return from_bytes_method(hexstr_to_bytes(d))
     else:
         # Type is a primitive, cast with correct class
         return klass(d)


-@overload
-def recurse_jsonify(d: Union[List[Any], Tuple[Any, ...]]) -> List[Any]:
-    ...
-
-
-@overload
-def recurse_jsonify(d: Dict[str, Any]) -> Dict[str, Any]:
-    ...
-
-
-def recurse_jsonify(d: Union[List[Any], Tuple[Any, ...], Dict[str, Any]]) -> Union[List[Any], Dict[str, Any]]:
+def recurse_jsonify(d: Any) -> Any:
     """
     Makes bytes objects and unhashable types into strings with 0x, and makes large ints into
     strings.
     """
-    if isinstance(d, list) or isinstance(d, tuple):
+    if dataclasses.is_dataclass(d):
+        new_dict = {}
+        for field in dataclasses.fields(d):
+            new_dict[field.name] = recurse_jsonify(getattr(d, field.name))
+        return new_dict
+
+    elif isinstance(d, list) or isinstance(d, tuple):
         new_list = []
         for item in d:
-            if type(item).__name__ in unhashable_types or issubclass(type(item), bytes):
-                item = f"0x{bytes(item).hex()}"
-            if isinstance(item, dict):
-                item = recurse_jsonify(item)
-            if isinstance(item, list):
-                item = recurse_jsonify(item)
-            if isinstance(item, tuple):
-                item = recurse_jsonify(item)
-            if isinstance(item, Enum):
-                item = item.name
-            if isinstance(item, int) and type(item) in big_ints:
-                item = int(item)
-            new_list.append(item)
-        d = new_list
+            new_list.append(recurse_jsonify(item))
+        return new_list

-    else:
-        for key, value in d.items():
-            if type(value).__name__ in unhashable_types or issubclass(type(value), bytes):
-                d[key] = f"0x{bytes(value).hex()}"
-            if isinstance(value, dict):
-                d[key] = recurse_jsonify(value)
-            if isinstance(value, list):
-                d[key] = recurse_jsonify(value)
-            if isinstance(value, tuple):
-                d[key] = recurse_jsonify(value)
-            if isinstance(value, Enum):
-                d[key] = value.name
-            if isinstance(value, int) and type(value) in big_ints:
-                d[key] = int(value)
-        return d
+    elif isinstance(d, dict):
+        new_dict = {}
+        for name, val in d.items():
+            new_dict[name] = recurse_jsonify(val)
+        return new_dict
+
+    elif type(d).__name__ in unhashable_types or issubclass(type(d), bytes):
+        return f"0x{bytes(d).hex()}"
+    elif isinstance(d, Enum):
+        return d.name
+    elif isinstance(d, int):
+        return int(d)
+    elif d is None or type(d) == str:
+        return d
+    raise ValueError(f"failed to jsonify {d} (type: {type(d)})")


 def parse_bool(f: BinaryIO) -> bool:
@@ -239,10 +199,13 @@ def parse_tuple(f: BinaryIO, list_parse_inner_type_f: List[ParseFunctionType]) -> Tuple[Any, ...]:
     return tuple(full_list)


-def parse_size_hints(f: BinaryIO, f_type: Type[Any], bytes_to_read: int) -> Any:
+def parse_size_hints(f: BinaryIO, f_type: Type[Any], bytes_to_read: int, unchecked: bool) -> Any:
     bytes_read = f.read(bytes_to_read)
     assert bytes_read is not None and len(bytes_read) == bytes_to_read
-    return f_type.from_bytes(bytes_read)
+    if unchecked:
+        return f_type.from_bytes_unchecked(bytes_read)
+    else:
+        return f_type.from_bytes(bytes_read)


 def parse_str(f: BinaryIO) -> str:
@@ -425,10 +388,14 @@ class Streamable:
         try:
             item = f_type(item)
         except (TypeError, AttributeError, ValueError):
+            if hasattr(f_type, "from_bytes_unchecked"):
+                from_bytes_method: Callable[[bytes], Any] = f_type.from_bytes_unchecked
+            else:
+                from_bytes_method = f_type.from_bytes
             try:
-                item = f_type.from_bytes(item)
+                item = from_bytes_method(item)
             except Exception:
-                item = f_type.from_bytes(bytes(item))
+                item = from_bytes_method(bytes(item))
         if not isinstance(item, f_type):
             raise ValueError(f"Wrong type for {f_name}")
         return item
@@ -475,9 +442,12 @@ class Streamable:
             inner_types = get_args(f_type)
             list_parse_inner_type_f = [cls.function_to_parse_one_item(_) for _ in inner_types]
             return lambda f: parse_tuple(f, list_parse_inner_type_f)
+        if hasattr(f_type, "from_bytes_unchecked") and f_type.__name__ in size_hints:
+            bytes_to_read = size_hints[f_type.__name__]
+            return lambda f: parse_size_hints(f, f_type, bytes_to_read, unchecked=True)
         if hasattr(f_type, "from_bytes") and f_type.__name__ in size_hints:
             bytes_to_read = size_hints[f_type.__name__]
-            return lambda f: parse_size_hints(f, f_type, bytes_to_read)
+            return lambda f: parse_size_hints(f, f_type, bytes_to_read, unchecked=False)
         if f_type is str:
             return parse_str
         raise NotImplementedError(f"Type {f_type} does not have parse")
@@ -556,13 +526,14 @@ class Streamable:
         return bytes(f.getvalue())

     def __str__(self: Any) -> str:
-        return pp.pformat(recurse_jsonify(dataclasses.asdict(self)))
+        return pp.pformat(recurse_jsonify(self))

     def __repr__(self: Any) -> str:
-        return pp.pformat(recurse_jsonify(dataclasses.asdict(self)))
+        return pp.pformat(recurse_jsonify(self))

     def to_json_dict(self) -> Dict[str, Any]:
-        return recurse_jsonify(dataclasses.asdict(self))
+        ret: Dict[str, Any] = recurse_jsonify(self)
+        return ret

     @classmethod
     def from_json_dict(cls: Any, json_dict: Dict[str, Any]) -> Any:
@ -1,6 +1,4 @@
|
||||
import io
|
||||
import struct
|
||||
from typing import Any, BinaryIO, SupportsInt, Type, TypeVar, Union
|
||||
from typing import BinaryIO, SupportsInt, Type, TypeVar, Union
|
||||
|
||||
from typing_extensions import Protocol, SupportsIndex
|
||||
|
||||
@ -13,8 +11,45 @@ class SupportsTrunc(Protocol):
|
||||
...
|
||||
|
||||
|
||||
def parse_metadata_from_name(cls: Type[_T_StructStream]) -> Type[_T_StructStream]:
|
||||
# TODO: turn this around to calculate the PACK from the size and signedness
|
||||
|
||||
name_signedness, _, name_bit_size = cls.__name__.partition("int")
|
||||
cls.SIGNED = False if name_signedness == "u" else True
|
||||
try:
|
||||
cls.BITS = int(name_bit_size)
|
||||
except ValueError as e:
|
||||
raise ValueError(f"expected integer suffix but got: {name_bit_size!r}") from e
|
||||
|
||||
if cls.BITS <= 0:
raise ValueError(f"bit size must be greater than zero but got: {cls.BITS}")

expected_name = f"{'' if cls.SIGNED else 'u'}int{cls.BITS}"
if cls.__name__ != expected_name:
raise ValueError(f"expected class name is {expected_name} but got: {cls.__name__}")

cls.SIZE, remainder = divmod(cls.BITS, 8)
if remainder != 0:
# There may be a good use case for removing this but until the details are
# thought through we should avoid such cases.
raise ValueError(f"cls.BITS must be a multiple of 8: {cls.BITS}")

if cls.SIGNED:
cls.MAXIMUM_EXCLUSIVE = 2 ** (cls.BITS - 1)
cls.MINIMUM = -(2 ** (cls.BITS - 1))
else:
cls.MAXIMUM_EXCLUSIVE = 2 ** cls.BITS
cls.MINIMUM = 0

return cls


class StructStream(int):
PACK = ""
SIZE = 0
BITS = 0
SIGNED = False
MAXIMUM_EXCLUSIVE = 0
MINIMUM = 0

"""
Create a class that can parse and stream itself based on a struct.pack template string.
@ -22,39 +57,28 @@ class StructStream(int):

# This is just a partial exposure of the underlying int constructor. Liskov...
# https://github.com/python/typeshed/blob/5d07ebc864577c04366fcc46b84479dbec033921/stdlib/builtins.pyi#L181-L185
def __new__(
cls: Type[_T_StructStream], value: Union[str, bytes, SupportsInt, SupportsIndex, SupportsTrunc]
) -> _T_StructStream:
value = int(value)
try:
v1 = struct.unpack(cls.PACK, struct.pack(cls.PACK, value))[0]
if value != v1:
raise ValueError(f"Value {value} does not fit into {cls.__name__}")
except Exception:
bits = struct.calcsize(cls.PACK) * 8
raise ValueError(
f"Value {value} of size {value.bit_length()} does not fit into " f"{cls.__name__} of size {bits}"
)
return int.__new__(cls, value)
def __init__(self, value: Union[str, bytes, SupportsInt, SupportsIndex, SupportsTrunc]) -> None:
# value is unused here and that is ok since .__new__() seems to have already
# processed the parameter when creating the instance of the class. We have no
# additional special action to take here beyond verifying that the newly
# created instance satisfies the bounds limitations of the particular subclass.
super().__init__()
if not (self.MINIMUM <= self < self.MAXIMUM_EXCLUSIVE):
raise ValueError(f"Value {self} does not fit into {type(self).__name__}")

@classmethod
def parse(cls: Any, f: BinaryIO) -> Any:
bytes_to_read = struct.calcsize(cls.PACK)
read_bytes = f.read(bytes_to_read)
assert read_bytes is not None and len(read_bytes) == bytes_to_read
return cls(*struct.unpack(cls.PACK, read_bytes))
def parse(cls: Type[_T_StructStream], f: BinaryIO) -> _T_StructStream:
read_bytes = f.read(cls.SIZE)
return cls.from_bytes(read_bytes)

def stream(self, f):
f.write(struct.pack(self.PACK, self))
def stream(self, f: BinaryIO) -> None:
f.write(bytes(self))

@classmethod
def from_bytes(cls: Any, blob: bytes) -> Any:  # type: ignore
f = io.BytesIO(blob)
result = cls.parse(f)
assert f.read() == b""
return result
def from_bytes(cls: Type[_T_StructStream], blob: bytes) -> _T_StructStream:  # type: ignore[override]
if len(blob) != cls.SIZE:
raise ValueError(f"{cls.__name__}.from_bytes() requires {cls.SIZE} bytes but got: {len(blob)}")
return cls(int.from_bytes(blob, "big", signed=cls.SIGNED))

def __bytes__(self: Any) -> bytes:
f = io.BytesIO()
self.stream(f)
return bytes(f.getvalue())
def __bytes__(self) -> bytes:
return super().to_bytes(length=self.SIZE, byteorder="big", signed=self.SIGNED)
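A hypothetical usage sketch of the reworked class, assuming parse_metadata_from_name and StructStream above are importable; it shows the name-derived metadata, the range check, and the new byte round-trip:

@parse_metadata_from_name
class uint16(StructStream):
    pass

assert (uint16.BITS, uint16.SIZE, uint16.SIGNED) == (16, 2, False)
assert uint16.MINIMUM == 0 and uint16.MAXIMUM_EXCLUSIVE == 65536

x = uint16(0x1234)
assert bytes(x) == b"\x12\x34"                  # big-endian, exactly SIZE bytes
assert uint16.from_bytes(b"\x12\x34") == 0x1234
try:
    uint16(65536)                               # out of range for 16 bits
except ValueError:
    pass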

@ -3,6 +3,7 @@ from __future__ import annotations
import dataclasses
import logging
import time
import traceback
from secrets import token_bytes
from typing import Any, Dict, List, Optional, Set, Tuple

@ -11,14 +12,14 @@ from blspy import AugSchemeMPL, G2Element
from chia.consensus.cost_calculator import NPCResult
from chia.full_node.bundle_tools import simple_solution_generator
from chia.full_node.mempool_check_conditions import get_name_puzzle_conditions
from chia.types.announcement import Announcement
from chia.types.blockchain_format.coin import Coin
from chia.types.blockchain_format.program import Program
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.announcement import Announcement
from chia.types.coin_spend import CoinSpend
from chia.types.condition_opcodes import ConditionOpcode
from chia.types.generator_types import BlockGenerator
from chia.types.spend_bundle import SpendBundle
from chia.types.condition_opcodes import ConditionOpcode
from chia.util.byte_types import hexstr_to_bytes
from chia.util.condition_tools import conditions_dict_for_solution, pkm_pairs_for_conditions_dict
from chia.util.hash import std_hash
@ -29,11 +30,12 @@ from chia.wallet.cat_wallet.cat_utils import (
CAT_MOD,
SpendableCAT,
construct_cat_puzzle,
unsigned_spend_bundle_for_spendable_cats,
match_cat_puzzle,
unsigned_spend_bundle_for_spendable_cats,
)
from chia.wallet.derivation_record import DerivationRecord
from chia.wallet.cat_wallet.lineage_store import CATLineageStore
from chia.wallet.coin_selection import select_coins
from chia.wallet.derivation_record import DerivationRecord
from chia.wallet.lineage_proof import LineageProof
from chia.wallet.outer_puzzles import AssetType
from chia.wallet.puzzle_drivers import PuzzleInfo
@ -44,14 +46,12 @@ from chia.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle import (
calculate_synthetic_secret_key,
)
from chia.wallet.transaction_record import TransactionRecord
from chia.wallet.util.compute_memos import compute_memos
from chia.wallet.util.transaction_type import TransactionType
from chia.wallet.util.wallet_types import WalletType, AmountWithPuzzlehash
from chia.wallet.util.wallet_types import AmountWithPuzzlehash, WalletType
from chia.wallet.wallet import Wallet
from chia.wallet.wallet_coin_record import WalletCoinRecord
from chia.wallet.wallet_info import WalletInfo
from chia.wallet.util.compute_memos import compute_memos
import traceback


# This should probably not live in this file but it's experimental right now

@ -444,51 +444,35 @@ class CATWallet:

return result

async def select_coins(self, amount: uint64) -> Set[Coin]:
async def select_coins(
self, amount: uint64, exclude: Optional[List[Coin]] = None, min_coin_amount: Optional[uint128] = None
) -> Set[Coin]:
"""
Returns a set of coins that can be used for generating a new transaction.
Note: Must be called under wallet state manager lock
"""

spendable_am = await self.get_confirmed_balance()

if amount > spendable_am:
error_msg = f"Can't select amount higher than our spendable balance {amount}, spendable {spendable_am}"
self.log.warning(error_msg)
raise ValueError(error_msg)

self.log.info(f"About to select coins for amount {amount}")
spendable: List[WalletCoinRecord] = await self.get_cat_spendable_coins()

sum = 0
used_coins: Set = set()

# Use older coins first
spendable.sort(key=lambda r: r.confirmed_block_height)
spendable_amount: uint128 = await self.get_spendable_balance()
spendable_coins: List[WalletCoinRecord] = await self.get_cat_spendable_coins()

# Try to use coins from the store, if there isn't enough of "unused"
# coins use change coins that are not confirmed yet
unconfirmed_removals: Dict[bytes32, Coin] = await self.wallet_state_manager.unconfirmed_removals_for_wallet(
self.id()
)
for coinrecord in spendable:
if sum >= amount and len(used_coins) > 0:
break
if coinrecord.coin.name() in unconfirmed_removals:
continue
sum += coinrecord.coin.amount
used_coins.add(coinrecord.coin)
self.log.info(f"Selected coin: {coinrecord.coin.name()} at height {coinrecord.confirmed_block_height}!")

# This happens when we couldn't use one of the coins because it's already used
# but unconfirmed, and we are waiting for the change. (unconfirmed_additions)
if sum < amount:
raise ValueError(
"Can't make this transaction at the moment. Waiting for the change from the previous transaction."
)

self.log.info(f"Successfully selected coins: {used_coins}")
return used_coins
coins = await select_coins(
spendable_amount,
self.wallet_state_manager.constants.MAX_COIN_AMOUNT,
spendable_coins,
unconfirmed_removals,
self.log,
uint128(amount),
exclude,
min_coin_amount,
)
assert coins is not None and len(coins) > 0
assert sum(c.amount for c in coins) >= amount
return coins

async def sign(self, spend_bundle: SpendBundle) -> SpendBundle:
sigs: List[G2Element] = []
157
chia/wallet/coin_selection.py
Normal file
@ -0,0 +1,157 @@
import logging
import random
from typing import Dict, List, Optional, Set

from chia.types.blockchain_format.coin import Coin
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.ints import uint64, uint128
from chia.wallet.wallet_coin_record import WalletCoinRecord


async def select_coins(
spendable_amount: uint128,
max_coin_amount: int,
spendable_coins: List[WalletCoinRecord],
unconfirmed_removals: Dict[bytes32, Coin],
log: logging.Logger,
amount: uint128,
exclude: Optional[List[Coin]] = None,
min_coin_amount: Optional[uint128] = None,
) -> Set[Coin]:
"""
Returns a set of coins that can be used for generating a new transaction.
"""
if exclude is None:
exclude = []
if min_coin_amount is None:
min_coin_amount = uint128(0)

if amount > spendable_amount:
error_msg = (
f"Can't select amount higher than our spendable balance. Amount: {amount}, spendable: {spendable_amount}"
)
log.warning(error_msg)
raise ValueError(error_msg)

log.debug(f"About to select coins for amount {amount}")

max_num_coins = 500
sum_spendable_coins = 0
valid_spendable_coins: List[Coin] = []

for coin_record in spendable_coins:  # remove all the unconfirmed coins, excluded coins and dust.
if coin_record.coin.name() in unconfirmed_removals:
continue
if coin_record.coin in exclude:
continue
if coin_record.coin.amount < min_coin_amount:
continue
valid_spendable_coins.append(coin_record.coin)
sum_spendable_coins += coin_record.coin.amount

# This happens when we couldn't use one of the coins because it's already used
# but unconfirmed, and we are waiting for the change. (unconfirmed_additions)
if sum_spendable_coins < amount:
raise ValueError(
f"Transaction for {amount} is greater than spendable balance of {sum_spendable_coins}. "
"There may be other transactions pending or our minimum coin amount is too high."
)

# Sort the coins by amount
valid_spendable_coins.sort(reverse=True, key=lambda r: r.amount)

# check for exact 1 to 1 coin match.
exact_match_coin: Optional[Coin] = check_for_exact_match(valid_spendable_coins, uint64(amount))
if exact_match_coin:
log.debug(f"selected coin with an exact match: {exact_match_coin}")
return {exact_match_coin}

# Check for an exact match with all of the coins smaller than the amount.
# If we have more, smaller coins than the amount we run the next algorithm.
smaller_coin_sum = 0  # coins smaller than target.
smaller_coins: List[Coin] = []
for coin in valid_spendable_coins:
if coin.amount < amount:
smaller_coin_sum += coin.amount
smaller_coins.append(coin)
if smaller_coin_sum == amount and len(smaller_coins) < max_num_coins:
log.debug(f"Selected all smaller coins because they equate to an exact match of the target: {smaller_coins}")
return set(smaller_coins)
elif smaller_coin_sum < amount:
smallest_coin = select_smallest_coin_over_target(len(smaller_coins), valid_spendable_coins)
log.debug(f"Selected closest greater coin: {smallest_coin.name()}")
return {smallest_coin}
elif smaller_coin_sum > amount:
coin_set = knapsack_coin_algorithm(smaller_coins, amount, max_coin_amount)
log.debug(f"Selected coins from knapsack algorithm: {coin_set}")
if coin_set is None:
raise ValueError("Knapsack algorithm failed to find a solution.")
if len(coin_set) > max_num_coins:
coin = select_smallest_coin_over_target(len(smaller_coins), valid_spendable_coins)
if coin is None or coin.amount < amount:
raise ValueError(
f"Transaction of {amount} mojo would use more than "
f"{max_num_coins} coins. Try sending a smaller amount"
)
coin_set = {coin}
return coin_set
else:
# if smaller_coin_sum == amount and len(smaller_coins) >= max_num_coins.
coin = select_smallest_coin_over_target(len(smaller_coins), valid_spendable_coins)
log.debug(f"Resorted to selecting smallest coin over target due to dust.: {coin}")
return {coin}
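Worked example (hypothetical amounts): with valid coins [100, 30, 20] and a target of 60, no coin matches exactly and the coins below the target only sum to 50, so select_smallest_coin_over_target (defined below) returns the 100-value coin; with coins [50, 30, 20, 7] and a target of 57, the coins below the target sum to 107 > 57, so the knapsack search below runs and can return {50, 7}.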


# These algorithms were based off of the algorithms in:
# https://murch.one/wp-content/uploads/2016/11/erhardt2016coinselection.pdf

# we use this to check if one of the coins exactly matches the target.
def check_for_exact_match(coin_list: List[Coin], target: uint64) -> Optional[Coin]:
for coin in coin_list:
if coin.amount == target:
return coin
return None


# amount of coins smaller than target, followed by a list of all valid spendable coins sorted in descending order.
def select_smallest_coin_over_target(smaller_coin_amount: int, valid_spendable_coin_list: List[Coin]) -> Coin:
if smaller_coin_amount >= len(valid_spendable_coin_list):
raise ValueError("Unable to select coins for this transaction. Try sending a smaller amount")
if smaller_coin_amount > 0:  # in case we only have bigger coins.
greater_coins = valid_spendable_coin_list[:-smaller_coin_amount]
else:
greater_coins = valid_spendable_coin_list
coin = greater_coins[len(greater_coins) - 1]  # select the coin with the least value.
return coin
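Worked example: with the descending list [100, 50, 30, 20] and two coins below the target, the slice [:-2] keeps [100, 50], and the final element, 50, is the smallest coin that still covers the target.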


# we use this to find the set of coins which have total value closest to the target, but at least the target.
# IMPORTANT: The coins have to be sorted in descending order or else this function will not work.
def knapsack_coin_algorithm(smaller_coins: List[Coin], target: uint128, max_coin_amount: int) -> Optional[Set[Coin]]:
best_set_sum = max_coin_amount
best_set_of_coins: Optional[Set[Coin]] = None
for i in range(1000):
# reset these variables every loop.
selected_coins: Set[Coin] = set()
selected_coins_sum = 0
n_pass = 0
target_reached = False
while n_pass < 2 and not target_reached:
for coin in smaller_coins:
# run 2 passes where the first pass may select a coin 50% of the time.
# the second pass runs to finish the set if the first pass didn't finish the set.
# this makes each trial random and increases the chance of getting a perfect set.
if (n_pass == 0 and bool(random.getrandbits(1))) or (n_pass == 1 and coin not in selected_coins):
selected_coins_sum += coin.amount
selected_coins.add(coin)
if selected_coins_sum == target:
return selected_coins
if selected_coins_sum > target:
target_reached = True
if selected_coins_sum < best_set_sum:
best_set_of_coins = selected_coins.copy()
best_set_sum = selected_coins_sum
selected_coins_sum -= coin.amount
selected_coins.remove(coin)
n_pass += 1
return best_set_of_coins
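A hypothetical driver for the function above, using a stand-in coin type (only the .amount attribute and hashability matter here); because the search is randomized, an exact hit is found with high probability over the 1000 trials rather than guaranteed:

from dataclasses import dataclass

@dataclass(frozen=True)
class FakeCoin:
    amount: int

# Must be sorted in descending order, as the comment above warns.
coins = [FakeCoin(50), FakeCoin(30), FakeCoin(20), FakeCoin(7)]
best = knapsack_coin_algorithm(coins, target=57, max_coin_amount=2**48)
assert best is not None and sum(c.amount for c in best) >= 57
# A typical result is {FakeCoin(50), FakeCoin(7)}, summing to exactly 57.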

@ -260,9 +260,8 @@ class DIDWallet:
self.wallet_info = await wallet_state_manager.user_store.create_wallet(
name, WalletType.DISTRIBUTED_ID.value, info_as_string, in_transaction=True
)

await self.wallet_state_manager.add_new_wallet(self, self.wallet_info.id)
await self.wallet_state_manager.update_wallet_puzzle_hashes(self.wallet_info.id)
await self.wallet_state_manager.add_new_wallet(self, self.wallet_info.id, in_transaction=True)
await self.wallet_state_manager.update_wallet_puzzle_hashes(self.wallet_info.id, in_transaction=True)
await self.load_parent(self.did_info)
self.log.info(f"New DID wallet created {info_as_string}.")
if self.wallet_info is None:
@ -461,8 +460,13 @@ class DIDWallet:
"""
# full_puz = did_wallet_puzzles.create_fullpuz(innerpuz, origin.name())
# All additions in this block here:
new_puzhash = await self.get_new_did_inner_hash()
new_pubkey = bytes((await self.wallet_state_manager.get_unused_derivation_record(self.wallet_info.id)).pubkey)

new_pubkey = bytes(
(
await self.wallet_state_manager.get_unused_derivation_record(self.wallet_info.id, in_transaction=True)
).pubkey
)
new_puzhash = puzzle_for_pk(new_pubkey).get_tree_hash()
parent_info = None
assert did_info.origin_coin is not None
assert did_info.current_inner is not None
@ -481,8 +485,7 @@ class DIDWallet:
did_info.current_inner.get_tree_hash(),
coin.amount,
)

await self.add_parent(coin.name(), future_parent, False)
await self.add_parent(coin.name(), future_parent, True)
if children_state.spent_height != children_state.created_height:
did_info = DIDInfo(
did_info.origin_coin,
@ -496,7 +499,8 @@ class DIDWallet:
False,
did_info.metadata,
)
await self.save_info(did_info, False)

await self.save_info(did_info, True)
assert children_state.created_height
puzzle_solution_request = wallet_protocol.RequestPuzzleSolution(
coin.parent_coin_info, children_state.created_height
@ -514,7 +518,7 @@ class DIDWallet:
parent_innerpuz.get_tree_hash(),
parent_state.coin.amount,
)
await self.add_parent(coin.parent_coin_info, parent_info, False)
await self.add_parent(coin.parent_coin_info, parent_info, True)
assert parent_info is not None

def puzzle_for_pk(self, pubkey: G1Element) -> Program:
@ -1179,13 +1183,16 @@ class DIDWallet:

async def generate_eve_spend(self, coin: Coin, full_puzzle: Program, innerpuz: Program):
assert self.did_info.origin_coin is not None
uncurried = did_wallet_puzzles.uncurry_innerpuz(innerpuz)
assert uncurried is not None
p2_puzzle = uncurried[0]
# innerpuz solution is (mode p2_solution)
p2_solution = self.standard_wallet.make_solution(
primaries=[
{
"puzzlehash": innerpuz.get_tree_hash(),
"amount": uint64(coin.amount),
"memos": [innerpuz.get_tree_hash()],
"memos": [p2_puzzle.get_tree_hash()],
}
]
)

@ -1,4 +1,3 @@
from clvm_tools.binutils import assemble
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.blockchain_format.program import Program
from typing import List, Optional, Tuple, Iterator, Dict
@ -205,7 +204,7 @@ def metadata_to_program(metadata: Dict) -> Program:
"""
kv_list = []
for key, value in metadata.items():
kv_list.append((assemble(key), assemble(value)))
kv_list.append((key, value))
return Program.to(kv_list)


@ -1,8 +1,12 @@
from dataclasses import dataclass
from typing import List
from typing import List, Optional

from chia.util.ints import uint64
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.ints import uint16, uint64
from chia.util.streamable import Streamable, streamable
from chia.wallet.puzzles.load_clvm import load_clvm

LAUNCHER_PUZZLE = load_clvm("singleton_launcher.clvm")


@streamable
@ -10,41 +14,47 @@ from chia.util.streamable import Streamable, streamable
class NFTInfo(Streamable):
"""NFT Info for displaying NFT on the UI"""

launcher_id: str
launcher_id: bytes32
"""Launcher coin ID"""

nft_coin_id: str
nft_coin_id: bytes32
"""Current NFT coin ID"""

did_owner: str
did_owner: Optional[bytes32]
"""Owner DID"""

royalty: uint64
royalty: Optional[uint16]
"""Percentage of the transaction fee paid to the author, e.g. 1000 = 1%"""

data_uris: List[str]
""" A list of content URIs"""

data_hash: str
data_hash: bytes
"""Hash of the content"""

metadata_uris: List[str]
"""A list of metadata URIs"""

metadata_hash: str
metadata_hash: bytes
"""Hash of the metadata"""

license_uris: List[str]
"""A list of license URIs"""

license_hash: str
license_hash: bytes
"""Hash of the license"""

version: str
"""Current NFT version"""

edition_count: uint64
series_total: uint64
"""How many NFTs in the current series"""

edition_number: uint64
series_number: uint64
"""Number of the current NFT in the series"""

updater_puzhash: bytes32
"""Puzzle hash of the metadata updater in hex"""

chain_info: str
"""Information saved on the chain in hex"""

launcher_puzhash: bytes32 = LAUNCHER_PUZZLE.get_tree_hash()
"""Puzzle hash of the singleton launcher in hex"""

@ -1,8 +1,9 @@
import logging
from typing import Any, List, Tuple
from typing import Any, Dict, List, Tuple

from blspy import G1Element
from clvm.casts import int_from_bytes
from clvm_tools.binutils import disassemble

from chia.types.blockchain_format.coin import Coin
from chia.types.blockchain_format.program import Program
@ -13,7 +14,6 @@ from chia.wallet.nft_wallet.uncurry_nft import UncurriedNFT
from chia.wallet.puzzles.cat_loader import CAT_MOD
from chia.wallet.puzzles.load_clvm import load_clvm
from chia.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle import solution_for_conditions
from chia.wallet.util.debug_spend_bundle import disassemble

log = logging.getLogger(__name__)
SINGLETON_TOP_LAYER_MOD = load_clvm("singleton_top_layer_v1_1.clvm")
@ -88,30 +88,90 @@ def get_nft_info_from_puzzle(puzzle: Program, nft_coin: Coin) -> NFTInfo:
:param nft_coin: NFT coin
:return: NFTInfo
"""
# TODO Update this method after the NFT code finalized
uncurried_nft: UncurriedNFT = UncurriedNFT.uncurry(puzzle)
data_uris = []
data_uris: List[str] = []
for uri in uncurried_nft.data_uris.as_python():
data_uris.append(str(uri, "utf-8"))
meta_uris: List[str] = []
for uri in uncurried_nft.meta_uris.as_python():
meta_uris.append(str(uri, "utf-8"))
license_uris: List[str] = []
for uri in uncurried_nft.license_uris.as_python():
license_uris.append(str(uri, "utf-8"))

nft_info = NFTInfo(
uncurried_nft.singleton_launcher_id.as_python().hex().upper(),
nft_coin.name().hex().upper(),
uncurried_nft.owner_did.as_python().hex().upper(),
uint64(uncurried_nft.trade_price_percentage.as_int()),
uncurried_nft.singleton_launcher_id.as_python(),
nft_coin.name(),
uncurried_nft.owner_did,
uncurried_nft.trade_price_percentage,
data_uris,
uncurried_nft.data_hash.as_python().hex().upper(),
[],
"",
[],
"",
"NFT1",
uint64(1),
uint64(1),
uncurried_nft.data_hash.as_python(),
meta_uris,
uncurried_nft.meta_hash.as_python(),
license_uris,
uncurried_nft.license_hash.as_python(),
uint64(uncurried_nft.series_total.as_int()),
uint64(uncurried_nft.series_total.as_int()),
uncurried_nft.metadata_updater_hash.as_python(),
disassemble(uncurried_nft.metadata),
)
return nft_info


def metadata_to_program(metadata: Dict[bytes, Any]) -> Program:
"""
Convert the metadata dict to a Chialisp program
:param metadata: User defined metadata
:return: Chialisp program
"""
kv_list = []
for key, value in metadata.items():
kv_list.append((key, value))
program: Program = Program.to(kv_list)
return program


def program_to_metadata(program: Program) -> Dict[bytes, Any]:
"""
Convert a program to a metadata dict
:param program: Chialisp program containing the metadata
:return: Metadata dict
"""
metadata = {}
for kv_pair in program.as_iter():
metadata[kv_pair.first().as_atom()] = kv_pair.rest().as_python()
return metadata


def prepend_value(key: bytes, value: Program, metadata: Dict[bytes, Any]) -> None:
"""
Prepend a value to a list in the metadata
:param key: Key of the field
:param value: Value to add
:param metadata: Metadata
:return:
"""

if value != Program.to(0):
if metadata[key] == b"":
metadata[key] = [value.as_python()]
else:
metadata[key].insert(0, value.as_python())


def update_metadata(metadata: Program, update_condition: Program) -> Program:
"""
Apply conditions of metadata updater to the previous metadata
:param metadata: Previous metadata
:param update_condition: Update metadata conditions
:return: Updated metadata
"""
new_metadata: Dict[bytes, Any] = program_to_metadata(metadata)
uri: Program = update_condition.rest().rest().first()
prepend_value(uri.first().as_python(), uri.rest(), new_metadata)
return metadata_to_program(new_metadata)
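A plain-Python walk-through of the helpers above, with CLVM Program values replaced by hypothetical bytes/lists to show the shape of the transformation:

# Metadata as decoded by program_to_metadata (hypothetical values):
metadata = {b"u": [b"https://old.example/data"], b"mu": b""}
# prepend_value puts a new URI at the front of an existing list...
metadata[b"u"].insert(0, b"https://new.example/data")
# ...and turns an empty field into a one-element list:
metadata[b"mu"] = [b"https://meta.example/0"]
assert metadata[b"u"][0] == b"https://new.example/data"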


def create_ownership_layer_puzzle(nft_id: bytes32, did_id: bytes32, p2_puzzle: Program, percentage: uint16) -> Program:
log.debug(f"Creating ownership layer puzzle with {nft_id=} {did_id=} {percentage=} {p2_puzzle=}")
singleton_struct = Program.to((SINGLETON_MOD_HASH, (nft_id, LAUNCHER_PUZZLE_HASH)))
@ -146,7 +206,7 @@ def create_ownership_layer_transfer_solution(
return solution


def get_metadata_and_p2_puzhash(unft, solution: Program) -> Tuple[Program, bytes32]:
def get_metadata_and_p2_puzhash(unft: UncurriedNFT, solution: Program) -> Tuple[Program, bytes32]:
if unft.owner_did:
conditions = solution.at("ffffrrrrrrf").as_iter()
else:

@ -30,6 +30,7 @@ from chia.wallet.nft_wallet.nft_puzzles import (
NFT_STATE_LAYER_MOD_HASH,
create_ownership_layer_puzzle,
create_ownership_layer_transfer_solution,
get_metadata_and_p2_puzhash,
)
from chia.wallet.nft_wallet.uncurry_nft import UncurriedNFT
from chia.wallet.puzzles.load_clvm import load_clvm
@ -50,7 +51,6 @@ from chia.wallet.wallet_info import WalletInfo

_T_NFTWallet = TypeVar("_T_NFTWallet", bound="NFTWallet")


OFFER_MOD = load_clvm("settlement_payments.clvm")


@ -222,10 +222,8 @@ class NFTWallet:
f"found the info for NFT coin {coin_name} {uncurried_nft.inner_puzzle} {uncurried_nft.singleton_struct}"
)
singleton_id = bytes32(uncurried_nft.singleton_launcher_id.atom)
metadata = uncurried_nft.metadata
p2_puzzle = None
parent_inner_puzhash = uncurried_nft.nft_state_layer.get_tree_hash()
p2_puzzle_hash, metadata = uncurried_nft.get_metadata_and_p2_puzhash(solution)
p2_puzzle_hash, metadata = get_metadata_and_p2_puzhash(uncurried_nft, solution)
self.log.debug("Got back puzhash from solution: %s", p2_puzzle_hash)
derivation_record: Optional[
DerivationRecord
@ -237,7 +235,6 @@ class NFTWallet:
p2_puzzle = puzzle_for_pk(derivation_record.pubkey)
if p2_puzzle is None:
raise ValueError("Invalid puzzle")
new_inner_puzzle = uncurried_nft.get_new_inner_puzzle(p2_puzzle, solution)
parent_coin = None
coin_record = await self.wallet_state_manager.coin_store.get_coin_record(coin_name)
if coin_record is None:
@ -253,15 +250,14 @@ class NFTWallet:
self.log.debug("Got back updated metadata: %s", metadata)
child_puzzle: Program = nft_puzzles.create_full_puzzle(
singleton_id,
metadata,
bytes32(uncurried_nft.metdata_updater_hash.atom),
new_inner_puzzle,
Program.to(metadata),
bytes32(uncurried_nft.metadata_updater_hash.atom),
p2_puzzle,
)
self.log.debug(
"Created NFT full puzzle with inner: %s",
nft_puzzles.create_full_puzzle_with_nft_puzzle(singleton_id, uncurried_nft.inner_puzzle),
)
child_coin: Optional[Coin] = None
for new_coin in coin_spend.additions():
self.log.debug(
"Comparing addition: %s with %s, amount: %s ",
@ -343,12 +339,18 @@ class NFTWallet:
return did_inner_hash, did_bundle

async def generate_new_nft(
self, metadata: Program, target_puzzle: Optional[Program] = None, fee: uint64 = uint64(0), percentage=0
self,
metadata: Program,
target_puzzle_hash: Optional[bytes32] = None,
royalty_puzzle_hash: Optional[bytes32] = None,
percentage=0,
fee: uint64 = uint64(0),
) -> Optional[SpendBundle]:
# TODO Set royalty address after NFT1 chialisp finished
"""
This must be called under the wallet state manager lock
"""
amount = 1
amount = uint64(1)
coins = await self.standard_wallet.select_coins(amount)
if coins is None:
return None
@ -404,10 +406,8 @@ class NFTWallet:

bundles_to_agg = [tx_record.spend_bundle, launcher_sb]

if not target_puzzle:
if not target_puzzle_hash:
target_puzzle_hash = inner_puzzle.get_tree_hash()
else:
target_puzzle_hash = target_puzzle.get_tree_hash()
record: DerivationRecord
# Create inner solution for eve spend
if self.did_id:
@ -504,7 +504,11 @@ class NFTWallet:
return SpendBundle.aggregate([spend_bundle, SpendBundle([], agg_sig)])

async def _make_nft_transaction(
self, nft_coin_info: NFTCoinInfo, inner_solution: Program, fee: uint64 = uint64(0)
self,
nft_coin_info: NFTCoinInfo,
inner_solution: Program,
puzzle_hashes_to_sign: List[bytes32],
fee: uint64 = uint64(0),
) -> TransactionRecord:

coin = nft_coin_info.coin
@ -527,7 +531,7 @@ class NFTWallet:
)
list_of_coinspends = [CoinSpend(coin, full_puzzle.to_serialized_program(), full_solution)]
spend_bundle = SpendBundle(list_of_coinspends, AugSchemeMPL.aggregate([]))
spend_bundle = await self.sign(spend_bundle)
spend_bundle = await self.sign(spend_bundle, puzzle_hashes_to_sign)
full_spend = SpendBundle.aggregate([spend_bundle])
self.log.debug("Memos are: %r", list(compute_memos(full_spend).items()))
nft_record = TransactionRecord(
@ -551,20 +555,23 @@ class NFTWallet:
return nft_record

async def update_metadata(
self, nft_coin_info: NFTCoinInfo, uri: str, fee: uint64 = uint64(0)
self, nft_coin_info: NFTCoinInfo, key: str, uri: str, fee: uint64 = uint64(0)
) -> Optional[SpendBundle]:
coin = nft_coin_info.coin
# we're not changing it

uncurried_nft = UncurriedNFT.uncurry(nft_coin_info.full_puzzle)

puzzle_hash = uncurried_nft.inner_puzzle.get_tree_hash()
condition_list = [make_create_coin_condition(puzzle_hash, coin.amount, [puzzle_hash])]
condition_list.append([int_to_bytes(-24), NFT_METADATA_UPDATER, uri.encode("utf-8")])
condition_list = [
make_create_coin_condition(puzzle_hash, coin.amount, [puzzle_hash]),
[int_to_bytes(-24), NFT_METADATA_UPDATER, (key, uri)],
]

self.log.info("Attempting to add a url to NFT coin %s in the metadata: %s", nft_coin_info, uri)
self.log.info(
"Attempting to add urls to NFT coin %s in the metadata: %s", nft_coin_info, uncurried_nft.metadata
)
inner_solution = solution_for_conditions(condition_list)
nft_tx_record = await self._make_nft_transaction(nft_coin_info, inner_solution, fee)
nft_tx_record = await self._make_nft_transaction(nft_coin_info, inner_solution, [puzzle_hash], fee)
await self.standard_wallet.push_transaction(nft_tx_record)
return nft_tx_record.spend_bundle

@ -583,7 +590,7 @@ class NFTWallet:
condition_list = [make_create_coin_condition(puzzle_hash, amount, [bytes32(puzzle_hash)])]
self.log.debug("Condition for new coin: %r", condition_list)
inner_solution = solution_for_conditions(condition_list)
nft_tx_record = await self._make_nft_transaction(nft_coin_info, inner_solution, fee)
nft_tx_record = await self._make_nft_transaction(nft_coin_info, inner_solution, [puzzle_hash], fee)
await self.standard_wallet.push_transaction(nft_tx_record)
return nft_tx_record.spend_bundle


@ -8,6 +8,7 @@ from blspy import G1Element

from chia.types.blockchain_format.program import Program
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.ints import uint16
from chia.wallet.puzzles.load_clvm import load_clvm

log = logging.getLogger(__name__)
@ -51,6 +52,13 @@ class UncurriedNFT:
"""
data_uris: Program
data_hash: Program
meta_uris: Program
meta_hash: Program
license_uris: Program
license_hash: Program
series_number: Program
series_total: Program

inner_puzzle: Program
"""NFT state layer inner puzzle"""

@ -71,8 +79,8 @@ class UncurriedNFT:
Curried parameters of the transfer program
[royalty_address, trade_price_percentage, settlement_mod_hash, cat_mod_hash]
"""
royalty_address: Optional[Program]
trade_price_percentage: Optional[Program]
royalty_address: Optional[bytes32]
trade_price_percentage: Optional[uint16]

@classmethod
def uncurry(cls: Type[_T_UncurriedNFT], puzzle: Program) -> UncurriedNFT:
@ -98,15 +106,34 @@ class UncurriedNFT:
raise ValueError(f"Cannot uncurry NFT puzzle, failed on NFT state layer: Mod {mod}")
try:
# Set nft parameters
(nft_mod_hash, metadata, metadata_updater_hash, inner_puzzle) = curried_args.as_iter()

(nft_mod_hash, metadata, metadata_updater_hash, inner_puzzle) = curried_args.as_iter()
data_uris = Program.to([])
data_hash = Program.to(0)
meta_uris = Program.to([])
meta_hash = Program.to(0)
license_uris = Program.to([])
license_hash = Program.to(0)
series_number = Program.to(1)
series_total = Program.to(1)
# Set metadata
for kv_pair in metadata.as_iter():
if kv_pair.first().as_atom() == b"u":
data_uris = kv_pair.rest()
if kv_pair.first().as_atom() == b"h":
data_hash = kv_pair.rest()

if kv_pair.first().as_atom() == b"mu":
meta_uris = kv_pair.rest()
if kv_pair.first().as_atom() == b"mh":
meta_hash = kv_pair.rest()
if kv_pair.first().as_atom() == b"lu":
license_uris = kv_pair.rest()
if kv_pair.first().as_atom() == b"lh":
license_hash = kv_pair.rest()
if kv_pair.first().as_atom() == b"sn":
series_number = kv_pair.rest()
if kv_pair.first().as_atom() == b"st":
series_total = kv_pair.rest()
current_did = None
pubkey = None
transfer_program_mod = None
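For reference, the metadata keys parsed in the loop above map as follows: "u"/"h" are the data URI list and data hash, "mu"/"mh" the metadata URIs and hash, "lu"/"lh" the license URIs and hash, and "sn"/"st" the series number and series total.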
@ -137,6 +164,12 @@ class UncurriedNFT:
data_hash=data_hash,
p2_puzzle=p2_puzzle,
metadata_updater_hash=metadata_updater_hash,
meta_uris=meta_uris,
meta_hash=meta_hash,
license_uris=license_uris,
license_hash=license_hash,
series_number=series_number,
series_total=series_total,
inner_puzzle=inner_puzzle,
# TODO: Set/Remove following fields after NFT1 implemented
owner_did=current_did,

1
chia/wallet/puzzles/counter.clvm.hex
Normal file
@ -0,0 +1 @@
ff02ffff01ff04ffff04ff38ffff04ffff02ff3affff04ff02ffff04ff05ffff04ffff02ff2effff04ff02ffff04ff17ffff04ffff02ff26ffff04ff02ffff04ff2fff80808080ff8080808080ff8080808080ffff04ff27ff80808080ffff02ff32ffff04ff02ffff04ff0bffff04ff2fffff01ff01808080808080ffff04ffff01ffffff32ff0233ffff0401ff0102ffffffff02ffff03ff05ffff01ff02ff22ffff04ff02ffff04ff0dffff04ffff0bff3cffff0bff34ff2480ffff0bff3cffff0bff3cffff0bff34ff2c80ff0980ffff0bff3cff0bffff0bff34ff8080808080ff8080808080ffff010b80ff0180ff02ffff03ff0bffff01ff04ffff04ff38ffff04ffff02ff2affff04ff02ffff04ff05ffff04ffff04ffff04ff17ff1380ff8080ff8080808080ffff01ff80808080ffff04ffff04ff10ffff04ff23ffff04ffff02ff3effff04ff02ffff04ffff04ff13ffff04ff17ff808080ff80808080ff80808080ffff02ff32ffff04ff02ffff04ff05ffff04ff1bffff04ffff10ff17ffff010180ff8080808080808080ff8080ff0180ffff02ff36ffff04ff02ffff04ff05ffff04ffff02ff3effff04ff02ffff04ff0bff80808080ff8080808080ff02ff36ffff04ff02ffff04ff05ffff04ffff02ff3effff04ff02ffff04ff0bff80808080ffff04ffff02ff3effff04ff02ffff04ff05ff80808080ff808080808080ffffff02ffff03ffff07ff0580ffff01ff04ff29ffff02ff26ffff04ff02ffff04ff0dff8080808080ff8080ff0180ff0bff3cffff0bff34ff2880ffff0bff3cffff0bff3cffff0bff34ff2c80ff0580ffff0bff3cffff02ff22ffff04ff02ffff04ff07ffff04ffff0bff34ff3480ff8080808080ffff0bff34ff8080808080ffff02ffff03ffff07ff0b80ffff01ff10ff13ffff02ff2effff04ff02ffff04ff05ffff04ff1bff808080808080ffff011580ff0180ff02ffff03ffff07ff0580ffff01ff0bffff0102ffff02ff3effff04ff02ffff04ff09ff80808080ffff02ff3effff04ff02ffff04ff0dff8080808080ffff01ff0bffff0101ff058080ff0180ff018080

1
chia/wallet/puzzles/index_lock.clvm.hex
Normal file
@ -0,0 +1 @@
ff02ffff01ff04ffff04ffff013effff04ffff02ff02ffff04ff02ffff04ff05ff80808080ff808080ff8080ffff04ffff01ff02ffff03ffff07ff0580ffff01ff0bffff0102ffff02ff02ffff04ff02ffff04ff09ff80808080ffff02ff02ffff04ff02ffff04ff0dff8080808080ffff01ff0bffff0101ff058080ff0180ff018080

@ -6,7 +6,7 @@ import pathlib

import pkg_resources
from chia.types.blockchain_format.program import Program, SerializedProgram
from chia.util.lock import with_lock
from chia.util.lock import lock_by_path
from clvm_tools_rs import compile_clvm as compile_clvm_rust


@ -67,11 +67,9 @@ def compile_clvm_in_lock(full_path, output, search_paths):


def compile_clvm(full_path, output, search_paths=[]):
def do_compile():
with lock_by_path(f"{full_path}.lock"):
compile_clvm_in_lock(full_path, output, search_paths)

with_lock(f"{full_path}.lock", do_compile)


def load_serialized_clvm(clvm_filename, package_or_requirement=__name__) -> SerializedProgram:
"""

@ -1,21 +1,30 @@
(mod (CURRENT_METADATA METADATA_UPDATER_PUZZLE_HASH solution)
(mod (CURRENT_METADATA METADATA_UPDATER_PUZZLE_HASH (key . new_url))

; METADATA and METADATA_UPDATER_PUZZLE_HASH are passed in as truths from the layer above

; This program returns ((new_metadata new_metadata_updater_puzhash) conditions)

; once we find 'u' we don't need to continue looping
(defun add_url (METADATA new_url)
; Add uri to a field
(defun add_url (METADATA key new_url)
(if METADATA
(if (= (f (f METADATA)) 'u')
(c (c 'u' (c new_url (r (f METADATA)))) (r METADATA))
(c (f METADATA) (add_url (r METADATA) new_url))
(if (= (f (f METADATA)) key)
(c (c key (c new_url (r (f METADATA)))) (r METADATA))
(c (f METADATA) (add_url (r METADATA) key new_url))
)
()
)
)

; main
; returns ((new_metadata new_metadata_updater_puzhash) conditions)
(list (list (if solution (add_url CURRENT_METADATA solution) CURRENT_METADATA) METADATA_UPDATER_PUZZLE_HASH) 0)
(list
(list
(if (all key new_url)
(if (any (= key "mu") (= key "lu") (= key "u"))
(add_url CURRENT_METADATA key new_url)
CURRENT_METADATA
)
CURRENT_METADATA
)
METADATA_UPDATER_PUZZLE_HASH)
0
)
)

@ -1 +1 @@
ff02ffff01ff04ffff04ffff02ffff03ff17ffff01ff02ff02ffff04ff02ffff04ff05ffff04ff17ff8080808080ffff010580ff0180ffff04ff0bff808080ffff01ff808080ffff04ffff01ff02ffff03ff05ffff01ff02ffff03ffff09ff11ffff017580ffff01ff04ffff04ffff0175ffff04ff0bff198080ff0d80ffff01ff04ff09ffff02ff02ffff04ff02ffff04ff0dffff04ff0bff80808080808080ff0180ff8080ff0180ff018080
ff02ffff01ff04ffff04ffff02ffff03ffff22ff27ff3780ffff01ff02ffff03ffff21ffff09ff27ffff01826d7580ffff09ff27ffff01826c7580ffff09ff27ffff01758080ffff01ff02ff02ffff04ff02ffff04ff05ffff04ff27ffff04ff37ff808080808080ffff010580ff0180ffff010580ff0180ffff04ff0bff808080ffff01ff808080ffff04ffff01ff02ffff03ff05ffff01ff02ffff03ffff09ff11ff0b80ffff01ff04ffff04ff0bffff04ff17ff198080ff0d80ffff01ff04ff09ffff02ff02ffff04ff02ffff04ff0dffff04ff0bffff04ff17ff8080808080808080ff0180ff8080ff0180ff018080

@ -1 +1 @@
81970d352e6a39a241eaf8ca510a0e669e40d778ba612621c60a50ef6cf29c7b
fe8a4b4e27a2e29a4d3fc7ce9d527adbcaccbab6ada3903ccf3ba9a769d2d78b
@ -2,6 +2,7 @@ import asyncio
import logging
import random
from typing import List, Optional, Tuple, Union, Dict
from chia_rs import compute_merkle_set_root

from chia.consensus.constants import ConsensusConstants
from chia.protocols import wallet_protocol
@ -86,14 +87,14 @@ def validate_additions(
):
if proofs is None:
# Verify root
additions_merkle_set = MerkleSet()
additions_merkle_items: List[bytes32] = []

# Addition Merkle set contains puzzlehash and hash of all coins with that puzzlehash
for puzzle_hash, coins_l in coins:
additions_merkle_set.add_already_hashed(puzzle_hash)
additions_merkle_set.add_already_hashed(hash_coin_ids([c.name() for c in coins_l]))
additions_merkle_items.append(puzzle_hash)
additions_merkle_items.append(hash_coin_ids([c.name() for c in coins_l]))

additions_root = additions_merkle_set.get_root()
additions_root = bytes32(compute_merkle_set_root(additions_merkle_items))
if root != additions_root:
return False
else:
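Note on the hunk above: it swaps the incremental MerkleSet build for a single call into the Rust chia_rs helper; for the same sequence of already-hashed items the two are expected to produce the same root, so the root comparison that follows is unchanged.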

@ -7,15 +7,16 @@ from blspy import G1Element
from chia.consensus.cost_calculator import NPCResult
from chia.full_node.bundle_tools import simple_solution_generator
from chia.full_node.mempool_check_conditions import get_name_puzzle_conditions
from chia.types.announcement import Announcement
from chia.types.blockchain_format.coin import Coin
from chia.types.blockchain_format.program import Program, SerializedProgram
from chia.types.announcement import Announcement
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.coin_spend import CoinSpend
from chia.types.generator_types import BlockGenerator
from chia.types.spend_bundle import SpendBundle
from chia.util.ints import uint8, uint32, uint64, uint128
from chia.util.hash import std_hash
from chia.util.ints import uint8, uint32, uint64, uint128
from chia.wallet.coin_selection import select_coins
from chia.wallet.derivation_record import DerivationRecord
from chia.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle import (
DEFAULT_HIDDEN_PUZZLE_HASH,
@ -24,23 +25,23 @@ from chia.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle import (
solution_for_conditions,
)
from chia.wallet.puzzles.puzzle_utils import (
make_assert_coin_announcement,
make_assert_puzzle_announcement,
make_assert_my_coin_id_condition,
make_assert_absolute_seconds_exceeds_condition,
make_assert_coin_announcement,
make_assert_my_coin_id_condition,
make_assert_puzzle_announcement,
make_create_coin_announcement,
make_create_puzzle_announcement,
make_create_coin_condition,
make_create_puzzle_announcement,
make_reserve_fee_condition,
)
from chia.wallet.secret_key_store import SecretKeyStore
from chia.wallet.sign_coin_spends import sign_coin_spends
from chia.wallet.transaction_record import TransactionRecord
from chia.wallet.util.compute_memos import compute_memos
from chia.wallet.util.transaction_type import TransactionType
from chia.wallet.util.wallet_types import WalletType, AmountWithPuzzlehash
from chia.wallet.util.wallet_types import AmountWithPuzzlehash, WalletType
from chia.wallet.wallet_coin_record import WalletCoinRecord
from chia.wallet.wallet_info import WalletInfo
from chia.wallet.util.compute_memos import compute_memos


class Wallet:
@ -64,7 +65,7 @@ class Wallet:
self.cost_of_single_tx = None
return self

async def get_max_send_amount(self, records=None):
async def get_max_send_amount(self, records: Optional[Set[WalletCoinRecord]] = None) -> int:
spendable: List[WalletCoinRecord] = list(
await self.wallet_state_manager.get_spendable_coins_for_wallet(self.id(), records)
)
@ -76,6 +77,7 @@ class Wallet:
tx = await self.generate_signed_transaction(
coin.amount, coin.puzzle_hash, coins={coin}, ignore_max_send_amount=True
)
assert tx.spend_bundle is not None
program: BlockGenerator = simple_solution_generator(tx.spend_bundle)
# npc contains names of the coins removed, puzzle_hashes and their spend conditions
result: NPCResult = get_name_puzzle_conditions(
@ -179,8 +181,8 @@ class Wallet:
public_key = await self.hack_populate_secret_key_for_puzzle_hash(puzzle_hash)
return puzzle_for_pk(bytes(public_key))

async def get_new_puzzle(self) -> Program:
dr = await self.wallet_state_manager.get_unused_derivation_record(self.id())
async def get_new_puzzle(self, in_transaction: bool = False) -> Program:
dr = await self.wallet_state_manager.get_unused_derivation_record(self.id(), in_transaction=in_transaction)
return puzzle_for_pk(bytes(dr.pubkey))

async def get_puzzle_hash(self, new: bool) -> bytes32:
@ -244,59 +246,37 class Wallet:
python_program[1].append(condition)
return Program.to(python_program)

async def select_coins(self, amount, exclude: List[Coin] = None) -> Set[Coin]:
async def select_coins(
self, amount: uint64, exclude: List[Coin] = None, min_coin_amount: Optional[uint128] = None
) -> Set[Coin]:
"""
Returns a set of coins that can be used for generating a new transaction.
Note: This must be called under a wallet state manager lock
Note: Must be called under wallet state manager lock
"""
if exclude is None:
exclude = []

spendable_amount = await self.get_spendable_balance()

if amount > spendable_amount:
error_msg = (
f"Can't select amount higher than our spendable balance. Amount: {amount}, spendable: "
f" {spendable_amount}"
)
self.log.warning(error_msg)
raise ValueError(error_msg)

self.log.info(f"About to select coins for amount {amount}")
unspent: List[WalletCoinRecord] = list(
spendable_amount: uint128 = await self.get_spendable_balance()
spendable_coins: List[WalletCoinRecord] = list(
await self.wallet_state_manager.get_spendable_coins_for_wallet(self.id())
)
sum_value = 0
used_coins: Set = set()

# Use older coins first
unspent.sort(reverse=True, key=lambda r: r.coin.amount)

# Try to use coins from the store, if there isn't enough of "unused"
# coins use change coins that are not confirmed yet
unconfirmed_removals: Dict[bytes32, Coin] = await self.wallet_state_manager.unconfirmed_removals_for_wallet(
self.id()
)
for coinrecord in unspent:
if sum_value >= amount and len(used_coins) > 0:
break
if coinrecord.coin.name() in unconfirmed_removals:
continue
if coinrecord.coin in exclude:
continue
sum_value += coinrecord.coin.amount
used_coins.add(coinrecord.coin)
self.log.debug(f"Selected coin: {coinrecord.coin.name()} at height {coinrecord.confirmed_block_height}!")

# This happens when we couldn't use one of the coins because it's already used
# but unconfirmed, and we are waiting for the change. (unconfirmed_additions)
if sum_value < amount:
raise ValueError(
"Can't make this transaction at the moment. Waiting for the change from the previous transaction."
)

self.log.debug(f"Successfully selected coins: {used_coins}")
return used_coins
coins = await select_coins(
spendable_amount,
self.wallet_state_manager.constants.MAX_COIN_AMOUNT,
spendable_coins,
unconfirmed_removals,
self.log,
uint128(amount),
exclude,
min_coin_amount,
)
assert coins is not None and len(coins) > 0
assert sum(c.amount for c in coins) >= amount
return coins

async def _generate_unsigned_transaction(
self,
@ -332,7 +312,7 @@ class Wallet:
raise ValueError(f"Can't send more than {max_send} in a single transaction")
self.log.debug("Got back max send amount: %s", max_send)
if coins is None:
coins = await self.select_coins(total_amount)
coins = await self.select_coins(uint64(total_amount))
assert len(coins) > 0
self.log.info(f"coins is not None {coins}")
spend_value = sum([coin.amount for coin in coins])
@ -515,9 +495,9 @@ class Wallet:
# If we're losing value then get coins with at least that much value
# If we're gaining value then our amount doesn't matter
if chia_amount < 0:
utxos = await self.select_coins(abs(chia_amount), exclude)
utxos = await self.select_coins(uint64(abs(chia_amount)), exclude)
else:
utxos = await self.select_coins(0, exclude)
utxos = await self.select_coins(uint64(0), exclude)

assert len(utxos) > 0

@ -4,7 +4,6 @@ import logging
import random
import time
import traceback
from asyncio import CancelledError
from pathlib import Path
from typing import Any, Callable, Dict, Iterator, List, Optional, Set, Tuple

@ -141,6 +140,8 @@ class WalletNode:
self.validation_semaphore = None
self.local_node_synced = False
self.LONG_SYNC_THRESHOLD = 200
self.last_wallet_tx_resend_time: int = 0
self.wallet_tx_resend_timeout_secs: int = 1800  # Duration in seconds

async def ensure_keychain_proxy(self) -> KeychainProxy:
if self.keychain_proxy is None:
@ -230,6 +231,8 @@ class WalletNode:
if self.state_changed_callback is not None:
self.wallet_state_manager.set_callback(self.state_changed_callback)

self.last_wallet_tx_resend_time = int(time.time())
self.wallet_tx_resend_timeout_secs = self.config.get("tx_resend_timeout_secs", 60 * 60)
self.wallet_state_manager.set_pending_callback(self._pending_tx_handler)
self._shut_down = False
self._process_new_subscriptions_task = asyncio.create_task(self._process_new_subscriptions())
@ -332,7 +335,14 @@ class WalletNode:
return []
messages: List[Tuple[Message, Set[bytes32]]] = []

records: List[TransactionRecord] = await self.wallet_state_manager.tx_store.get_not_sent()
current_time = int(time.time())
retry_accepted_txs = False
if self.last_wallet_tx_resend_time < current_time - self.wallet_tx_resend_timeout_secs:
self.last_wallet_tx_resend_time = current_time
retry_accepted_txs = True
records: List[TransactionRecord] = await self.wallet_state_manager.tx_store.get_not_sent(
include_accepted_txs=retry_accepted_txs
)
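A reduced sketch of the resend gate introduced here, with hypothetical values standing in for the node state:

import time

last_resend_time = 0          # stands in for self.last_wallet_tx_resend_time
resend_timeout_secs = 1800    # stands in for self.wallet_tx_resend_timeout_secs

def should_retry_accepted_txs() -> bool:
    global last_resend_time
    now = int(time.time())
    if last_resend_time < now - resend_timeout_secs:
        last_resend_time = now  # arm the timer again
        return True
    return False

assert should_retry_accepted_txs() is True   # first check trips the gate
assert should_retry_accepted_txs() is False  # later checks within the window do not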

for record in records:
if record.spend_bundle is None:
@ -395,7 +405,7 @@ class WalletNode:
await self.new_peak_wallet(request, peer)
else:
assert False
except CancelledError:
except asyncio.CancelledError:
self.log.info("Queue task cancelled, exiting.")
raise
except Exception as e:
@ -1049,7 +1059,9 @@ class WalletNode:

if peer.peer_node_id in self.synced_peers:
await self.wallet_state_manager.blockchain.set_finished_sync_up_to(new_peak.height)
await self.wallet_state_manager.new_peak(new_peak)

async with self.wallet_state_manager.lock:
await self.wallet_state_manager.new_peak(new_peak)

async def wallet_short_sync_backtrack(self, header_block: HeaderBlock, peer: WSChiaConnection) -> int:
assert self.wallet_state_manager is not None

@ -326,7 +326,7 @@ class WalletStateManager:
if unused > 0:
await self.puzzle_store.set_used_up_to(uint32(unused - 1), in_transaction)

async def update_wallet_puzzle_hashes(self, wallet_id):
async def update_wallet_puzzle_hashes(self, wallet_id, in_transaction=False):
derivation_paths: List[DerivationRecord] = []
target_wallet = self.wallets[wallet_id]
last: Optional[uint32] = await self.puzzle_store.get_last_derivation_path_for_wallet(wallet_id)
@ -353,7 +353,7 @@ class WalletStateManager:
False,
)
)
await self.puzzle_store.add_derivation_paths(derivation_paths)
await self.puzzle_store.add_derivation_paths(derivation_paths, in_transaction=in_transaction)

async def get_unused_derivation_record(
self, wallet_id: uint32, in_transaction=False, hardened=False
@ -1397,6 +1397,10 @@ class WalletStateManager:
for wallet_id, wallet in self.wallets.items():
if wallet.type() == uint8(WalletType.POOLING_WALLET):
await wallet.new_peak(peak.height)
current_time = int(time.time())

if self.wallet_node.last_wallet_tx_resend_time < current_time - self.wallet_node.wallet_tx_resend_timeout_secs:
self.tx_pending_changed()

async def add_interested_puzzle_hashes(
self, puzzle_hashes: List[bytes32], wallet_ids: List[int], in_transaction: bool = False