Merge commit 'd1e445fac2e503ea230acf74ec24a1bb9c529aee' into atari-merge_main_d1e445fac2e503ea230acf74ec24a1bb9c529aee

Kyle Altendorf · 2022-05-13 16:08:17 -04:00
commit 259a2ce0ea
GPG Key ID: 5715D880FF005192 (no known key found for this signature in the database)
189 changed files with 3846 additions and 2263 deletions

View File

@ -1,7 +1,7 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS blockchain Tests
name: MacOS blockchain Test
on:
push:
@ -95,7 +95,7 @@ jobs:
- name: Test blockchain code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/blockchain/test_*.py --durations=10 -n 4 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/blockchain/test_blockchain.py tests/blockchain/test_blockchain_transactions.py
- name: Process coverage data
run: |

View File

@ -1,7 +1,7 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS clvm Tests
name: MacOS clvm Test
on:
push:
@ -81,7 +81,7 @@ jobs:
- name: Test clvm code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/clvm/test_*.py --durations=10 -n 4 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/clvm/test_chialisp_deserialization.py tests/clvm/test_clvm_compilation.py tests/clvm/test_clvm_step.py tests/clvm/test_program.py tests/clvm/test_puzzle_compression.py tests/clvm/test_puzzles.py tests/clvm/test_serialized_program.py tests/clvm/test_singletons.py tests/clvm/test_spend_sim.py
- name: Process coverage data
run: |

View File

@ -1,7 +1,7 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS core-cmds Tests
name: MacOS core-cmds Test
on:
push:
@ -81,7 +81,7 @@ jobs:
- name: Test core-cmds code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/cmds/test_*.py --durations=10 -n 4 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/core/cmds/test_keys.py tests/core/cmds/test_wallet.py
- name: Process coverage data
run: |

View File

@ -1,7 +1,7 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS core-consensus Tests
name: MacOS core-consensus Test
on:
push:
@ -81,7 +81,7 @@ jobs:
- name: Test core-consensus code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/consensus/test_*.py --durations=10 -n 4 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/core/consensus/test_pot_iterations.py
- name: Process coverage data
run: |

View File

@ -1,7 +1,7 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS core-custom_types Tests
name: MacOS core-custom_types Test
on:
push:
@ -81,7 +81,7 @@ jobs:
- name: Test core-custom_types code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/custom_types/test_*.py --durations=10 -n 4 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/core/custom_types/test_coin.py tests/core/custom_types/test_proof_of_space.py tests/core/custom_types/test_spend_bundle.py
- name: Process coverage data
run: |

View File

@ -1,7 +1,7 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS core-daemon Tests
name: MacOS core-daemon Test
on:
push:
@ -99,7 +99,7 @@ jobs:
- name: Test core-daemon code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/daemon/test_*.py --durations=10 -n 0 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" tests/core/daemon/test_daemon.py
- name: Process coverage data
run: |

View File

@ -1,7 +1,7 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS core-full_node-full_sync Tests
name: MacOS core-full_node-full_sync Test
on:
push:
@ -95,7 +95,7 @@ jobs:
- name: Test core-full_node-full_sync code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/full_node/full_sync/test_*.py --durations=10 -n 4 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/core/full_node/full_sync/test_full_sync.py
- name: Process coverage data
run: |

View File

@ -1,7 +1,7 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS core-full_node-stores Tests
name: MacOS core-full_node-stores Test
on:
push:
@ -95,7 +95,7 @@ jobs:
- name: Test core-full_node-stores code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/full_node/stores/test_*.py --durations=10 -n 4 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/core/full_node/stores/test_block_store.py tests/core/full_node/stores/test_coin_store.py tests/core/full_node/stores/test_full_node_store.py tests/core/full_node/stores/test_hint_store.py tests/core/full_node/stores/test_sync_store.py
- name: Process coverage data
run: |

View File

@ -1,7 +1,7 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS core-full_node Tests
name: MacOS core-full_node Test
on:
push:
@ -95,7 +95,7 @@ jobs:
- name: Test core-full_node code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/full_node/test_*.py --durations=10 -n 4 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/core/full_node/test_address_manager.py tests/core/full_node/test_block_height_map.py tests/core/full_node/test_conditions.py tests/core/full_node/test_full_node.py tests/core/full_node/test_generator_tools.py tests/core/full_node/test_hint_management.py tests/core/full_node/test_mempool.py tests/core/full_node/test_mempool_performance.py tests/core/full_node/test_node_load.py tests/core/full_node/test_peer_store_resolver.py tests/core/full_node/test_performance.py tests/core/full_node/test_transactions.py
- name: Process coverage data
run: |

View File

@ -1,7 +1,7 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS core-server Tests
name: MacOS core-server Test
on:
push:
@ -95,7 +95,7 @@ jobs:
- name: Test core-server code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/server/test_*.py --durations=10 -n 0 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" tests/core/server/test_dos.py tests/core/server/test_rate_limits.py
- name: Process coverage data
run: |

View File

@ -1,7 +1,7 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS core-ssl Tests
name: MacOS core-ssl Test
on:
push:
@ -95,7 +95,7 @@ jobs:
- name: Test core-ssl code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/ssl/test_*.py --durations=10 -n 4 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/core/ssl/test_ssl.py
- name: Process coverage data
run: |

View File

@ -1,7 +1,7 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS core-util Tests
name: MacOS core-util Test
on:
push:
@ -95,7 +95,7 @@ jobs:
- name: Test core-util code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/util/test_*.py --durations=10 -n 4 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/core/util/test_cached_bls.py tests/core/util/test_config.py tests/core/util/test_db_wrapper.py tests/core/util/test_file_keyring_synchronization.py tests/core/util/test_files.py tests/core/util/test_keychain.py tests/core/util/test_keyring_wrapper.py tests/core/util/test_lru_cache.py tests/core/util/test_significant_bits.py tests/core/util/test_streamable.py
- name: Process coverage data
run: |

View File

@ -1,7 +1,7 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS core Tests
name: MacOS core Test
on:
push:
@ -95,7 +95,7 @@ jobs:
- name: Test core code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/test_*.py --durations=10 -n 4 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/core/test_coins.py tests/core/test_cost_calculation.py tests/core/test_crawler_rpc.py tests/core/test_daemon_rpc.py tests/core/test_db_conversion.py tests/core/test_db_validation.py tests/core/test_farmer_harvester_rpc.py tests/core/test_filter.py tests/core/test_full_node_rpc.py tests/core/test_merkle_set.py tests/core/test_setproctitle.py
- name: Process coverage data
run: |

View File

@ -1,7 +1,7 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS farmer_harvester Tests
name: MacOS farmer_harvester Test
on:
push:
@ -95,7 +95,7 @@ jobs:
- name: Test farmer_harvester code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/farmer_harvester/test_*.py --durations=10 -n 0 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" tests/farmer_harvester/test_farmer_harvester.py
- name: Process coverage data
run: |

View File

@ -1,7 +1,7 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS generator Tests
name: MacOS generator Test
on:
push:
@ -81,7 +81,7 @@ jobs:
- name: Test generator code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/generator/test_*.py --durations=10 -n 4 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/generator/test_compression.py tests/generator/test_generator_types.py tests/generator/test_list_to_batches.py tests/generator/test_rom.py tests/generator/test_scan.py
- name: Process coverage data
run: |

View File

@ -1,7 +1,7 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS plot_sync Tests
name: MacOS plot_sync Test
on:
push:
@ -95,7 +95,7 @@ jobs:
- name: Test plot_sync code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/plot_sync/test_*.py --durations=10 -n 4 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/plot_sync/test_delta.py tests/plot_sync/test_plot_sync.py tests/plot_sync/test_receiver.py tests/plot_sync/test_sender.py tests/plot_sync/test_sync_simulated.py
- name: Process coverage data
run: |

View File

@ -1,7 +1,7 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS plotting Tests
name: MacOS plotting Test
on:
push:
@ -95,7 +95,7 @@ jobs:
- name: Test plotting code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/plotting/test_*.py --durations=10 -n 4 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/plotting/test_plot_manager.py
- name: Process coverage data
run: |

View File

@ -1,7 +1,7 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS pools Tests
name: MacOS pools Test
on:
push:
@ -95,7 +95,7 @@ jobs:
- name: Test pools code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/pools/test_*.py --durations=10 -n 2 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 2 -m "not benchmark" tests/pools/test_pool_cmdline.py tests/pools/test_pool_config.py tests/pools/test_pool_puzzles_lifecycle.py tests/pools/test_pool_rpc.py tests/pools/test_pool_wallet.py tests/pools/test_wallet_pool_store.py
- name: Process coverage data
run: |

View File

@ -1,7 +1,7 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS simulation Tests
name: MacOS simulation Test
on:
push:
@ -99,7 +99,7 @@ jobs:
- name: Test simulation code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/simulation/test_*.py --durations=10 -n 0 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" tests/simulation/test_simulation.py
- name: Process coverage data
run: |

View File

@ -1,7 +1,7 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS tools Tests
name: MacOS tools Test
on:
push:
@ -81,7 +81,7 @@ jobs:
- name: Test tools code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/tools/test_*.py --durations=10 -n 4 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/tools/test_full_sync.py tests/tools/test_run_block.py
- name: Process coverage data
run: |

View File

@ -1,7 +1,7 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS util Tests
name: MacOS util Test
on:
push:
@ -81,7 +81,7 @@ jobs:
- name: Test util code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/util/test_*.py --durations=10 -n 4 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/util/test_chunks.py tests/util/test_full_block_utils.py tests/util/test_lock_queue.py tests/util/test_network_protocol_files.py tests/util/test_paginator.py tests/util/test_struct_stream.py
- name: Process coverage data
run: |

View File

@ -1,7 +1,7 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS wallet-cat_wallet Tests
name: MacOS wallet-cat_wallet Test
on:
push:
@ -95,7 +95,7 @@ jobs:
- name: Test wallet-cat_wallet code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/wallet/cat_wallet/test_*.py --durations=10 -n 0 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" tests/wallet/cat_wallet/test_cat_lifecycle.py tests/wallet/cat_wallet/test_cat_wallet.py tests/wallet/cat_wallet/test_offer_lifecycle.py tests/wallet/cat_wallet/test_trades.py
- name: Process coverage data
run: |

View File

@ -1,7 +1,7 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS wallet-did_wallet Tests
name: MacOS wallet-did_wallet Test
on:
push:
@ -81,7 +81,7 @@ jobs:
- name: Test wallet-did_wallet code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/wallet/did_wallet/test_*.py --durations=10 -n 0 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" tests/wallet/did_wallet/test_did.py tests/wallet/did_wallet/test_did_rpc.py
- name: Process coverage data
run: |

View File

@ -1,7 +1,7 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS wallet-rl_wallet Tests
name: MacOS wallet-rl_wallet Test
on:
push:
@ -81,7 +81,7 @@ jobs:
- name: Test wallet-rl_wallet code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/wallet/rl_wallet/test_*.py --durations=10 -n 0 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" tests/wallet/rl_wallet/test_rl_rpc.py tests/wallet/rl_wallet/test_rl_wallet.py
- name: Process coverage data
run: |

View File

@ -1,7 +1,7 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS wallet-rpc Tests
name: MacOS wallet-rpc Test
on:
push:
@ -95,7 +95,7 @@ jobs:
- name: Test wallet-rpc code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/wallet/rpc/test_*.py --durations=10 -n 0 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" tests/wallet/rpc/test_wallet_rpc.py
- name: Process coverage data
run: |

View File

@ -1,7 +1,7 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS wallet-simple_sync Tests
name: MacOS wallet-simple_sync Test
on:
push:
@ -95,7 +95,7 @@ jobs:
- name: Test wallet-simple_sync code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/wallet/simple_sync/test_*.py --durations=10 -n 0 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" tests/wallet/simple_sync/test_simple_sync_protocol.py
- name: Process coverage data
run: |

View File

@ -1,7 +1,7 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS wallet-sync Tests
name: MacOS wallet-sync Test
on:
push:
@ -95,7 +95,7 @@ jobs:
- name: Test wallet-sync code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/wallet/sync/test_*.py --durations=10 -n 0 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" tests/wallet/sync/test_wallet_sync.py
- name: Process coverage data
run: |

View File

@ -1,7 +1,7 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS wallet Tests
name: MacOS wallet Test
on:
push:
@ -95,7 +95,7 @@ jobs:
- name: Test wallet code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/wallet/test_*.py --durations=10 -n 4 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/wallet/test_bech32m.py tests/wallet/test_chialisp.py tests/wallet/test_puzzle_store.py tests/wallet/test_singleton.py tests/wallet/test_singleton_lifecycle.py tests/wallet/test_singleton_lifecycle_fast.py tests/wallet/test_taproot.py tests/wallet/test_wallet.py tests/wallet/test_wallet_blockchain.py tests/wallet/test_wallet_interested_store.py tests/wallet/test_wallet_key_val_store.py tests/wallet/test_wallet_store.py tests/wallet/test_wallet_user_store.py
- name: Process coverage data
run: |

View File

@ -1,7 +1,7 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS weight_proof Tests
name: MacOS weight_proof Test
on:
push:
@ -95,7 +95,7 @@ jobs:
- name: Test weight_proof code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/weight_proof/test_*.py --durations=10 -n 4 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/weight_proof/test_weight_proof.py
- name: Process coverage data
run: |

View File

@ -94,7 +94,7 @@ jobs:
- name: Test blockchain code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/blockchain/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/blockchain/test_blockchain.py tests/blockchain/test_blockchain_transactions.py
- name: Process coverage data
run: |

View File

@ -80,7 +80,7 @@ jobs:
- name: Test clvm code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/clvm/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/clvm/test_chialisp_deserialization.py tests/clvm/test_clvm_compilation.py tests/clvm/test_clvm_step.py tests/clvm/test_program.py tests/clvm/test_puzzle_compression.py tests/clvm/test_puzzles.py tests/clvm/test_serialized_program.py tests/clvm/test_singletons.py tests/clvm/test_spend_sim.py
- name: Process coverage data
run: |

View File

@ -80,7 +80,7 @@ jobs:
- name: Test core-cmds code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/cmds/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/core/cmds/test_keys.py tests/core/cmds/test_wallet.py
- name: Process coverage data
run: |

View File

@ -80,7 +80,7 @@ jobs:
- name: Test core-consensus code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/consensus/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/core/consensus/test_pot_iterations.py
- name: Process coverage data
run: |

View File

@ -80,7 +80,7 @@ jobs:
- name: Test core-custom_types code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/custom_types/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/core/custom_types/test_coin.py tests/core/custom_types/test_proof_of_space.py tests/core/custom_types/test_spend_bundle.py
- name: Process coverage data
run: |

View File

@ -98,7 +98,7 @@ jobs:
- name: Test core-daemon code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/daemon/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" -p no:monitor tests/core/daemon/test_daemon.py
- name: Process coverage data
run: |

View File

@ -94,7 +94,7 @@ jobs:
- name: Test core-full_node-full_sync code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/full_node/full_sync/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/core/full_node/full_sync/test_full_sync.py
- name: Process coverage data
run: |

View File

@ -94,7 +94,7 @@ jobs:
- name: Test core-full_node-stores code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/full_node/stores/test_*.py --durations=10 -n 4 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/core/full_node/stores/test_block_store.py tests/core/full_node/stores/test_coin_store.py tests/core/full_node/stores/test_full_node_store.py tests/core/full_node/stores/test_hint_store.py tests/core/full_node/stores/test_sync_store.py
- name: Process coverage data
run: |

View File

@ -94,7 +94,7 @@ jobs:
- name: Test core-full_node code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/full_node/test_*.py --durations=10 -n 4 -m "not benchmark"
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" tests/core/full_node/test_address_manager.py tests/core/full_node/test_block_height_map.py tests/core/full_node/test_conditions.py tests/core/full_node/test_full_node.py tests/core/full_node/test_generator_tools.py tests/core/full_node/test_hint_management.py tests/core/full_node/test_mempool.py tests/core/full_node/test_mempool_performance.py tests/core/full_node/test_node_load.py tests/core/full_node/test_peer_store_resolver.py tests/core/full_node/test_performance.py tests/core/full_node/test_transactions.py
- name: Process coverage data
run: |

View File

@ -94,7 +94,7 @@ jobs:
- name: Test core-server code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/server/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" -p no:monitor tests/core/server/test_dos.py tests/core/server/test_rate_limits.py
- name: Process coverage data
run: |

View File

@ -94,7 +94,7 @@ jobs:
- name: Test core-ssl code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/ssl/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/core/ssl/test_ssl.py
- name: Process coverage data
run: |

View File

@ -94,7 +94,7 @@ jobs:
- name: Test core-util code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/util/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/core/util/test_cached_bls.py tests/core/util/test_config.py tests/core/util/test_db_wrapper.py tests/core/util/test_file_keyring_synchronization.py tests/core/util/test_files.py tests/core/util/test_keychain.py tests/core/util/test_keyring_wrapper.py tests/core/util/test_lru_cache.py tests/core/util/test_significant_bits.py tests/core/util/test_streamable.py
- name: Process coverage data
run: |

View File

@ -94,7 +94,7 @@ jobs:
- name: Test core code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/core/test_coins.py tests/core/test_cost_calculation.py tests/core/test_crawler_rpc.py tests/core/test_daemon_rpc.py tests/core/test_db_conversion.py tests/core/test_db_validation.py tests/core/test_farmer_harvester_rpc.py tests/core/test_filter.py tests/core/test_full_node_rpc.py tests/core/test_merkle_set.py tests/core/test_setproctitle.py
- name: Process coverage data
run: |

View File

@ -94,7 +94,7 @@ jobs:
- name: Test farmer_harvester code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/farmer_harvester/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" -p no:monitor tests/farmer_harvester/test_farmer_harvester.py
- name: Process coverage data
run: |

View File

@ -80,7 +80,7 @@ jobs:
- name: Test generator code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/generator/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/generator/test_compression.py tests/generator/test_generator_types.py tests/generator/test_list_to_batches.py tests/generator/test_rom.py tests/generator/test_scan.py
- name: Process coverage data
run: |

View File

@ -94,7 +94,7 @@ jobs:
- name: Test plot_sync code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/plot_sync/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/plot_sync/test_delta.py tests/plot_sync/test_plot_sync.py tests/plot_sync/test_receiver.py tests/plot_sync/test_sender.py tests/plot_sync/test_sync_simulated.py
- name: Process coverage data
run: |

View File

@ -94,7 +94,7 @@ jobs:
- name: Test plotting code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/plotting/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/plotting/test_plot_manager.py
- name: Process coverage data
run: |

View File

@ -94,7 +94,7 @@ jobs:
- name: Test pools code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/pools/test_*.py --durations=10 -n 2 -m "not benchmark" -p no:monitor
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 2 -m "not benchmark" -p no:monitor tests/pools/test_pool_cmdline.py tests/pools/test_pool_config.py tests/pools/test_pool_puzzles_lifecycle.py tests/pools/test_pool_rpc.py tests/pools/test_pool_wallet.py tests/pools/test_wallet_pool_store.py
- name: Process coverage data
run: |

View File

@ -98,7 +98,7 @@ jobs:
- name: Test simulation code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/simulation/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" -p no:monitor tests/simulation/test_simulation.py
- name: Process coverage data
run: |

View File

@ -80,7 +80,7 @@ jobs:
- name: Test tools code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/tools/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/tools/test_full_sync.py tests/tools/test_run_block.py
- name: Process coverage data
run: |

View File

@ -80,7 +80,7 @@ jobs:
- name: Test util code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/util/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/util/test_chunks.py tests/util/test_full_block_utils.py tests/util/test_lock_queue.py tests/util/test_network_protocol_files.py tests/util/test_paginator.py tests/util/test_struct_stream.py
- name: Process coverage data
run: |

View File

@ -94,7 +94,7 @@ jobs:
- name: Test wallet-cat_wallet code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/wallet/cat_wallet/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" -p no:monitor tests/wallet/cat_wallet/test_cat_lifecycle.py tests/wallet/cat_wallet/test_cat_wallet.py tests/wallet/cat_wallet/test_offer_lifecycle.py tests/wallet/cat_wallet/test_trades.py
- name: Process coverage data
run: |

View File

@ -80,7 +80,7 @@ jobs:
- name: Test wallet-did_wallet code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/wallet/did_wallet/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" -p no:monitor tests/wallet/did_wallet/test_did.py tests/wallet/did_wallet/test_did_rpc.py
- name: Process coverage data
run: |

View File

@ -80,7 +80,7 @@ jobs:
- name: Test wallet-rl_wallet code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/wallet/rl_wallet/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" -p no:monitor tests/wallet/rl_wallet/test_rl_rpc.py tests/wallet/rl_wallet/test_rl_wallet.py
- name: Process coverage data
run: |

View File

@ -94,7 +94,7 @@ jobs:
- name: Test wallet-rpc code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/wallet/rpc/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" -p no:monitor tests/wallet/rpc/test_wallet_rpc.py
- name: Process coverage data
run: |

View File

@ -94,7 +94,7 @@ jobs:
- name: Test wallet-simple_sync code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/wallet/simple_sync/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" -p no:monitor tests/wallet/simple_sync/test_simple_sync_protocol.py
- name: Process coverage data
run: |

View File

@ -94,7 +94,7 @@ jobs:
- name: Test wallet-sync code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/wallet/sync/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 0 -m "not benchmark" -p no:monitor tests/wallet/sync/test_wallet_sync.py
- name: Process coverage data
run: |

View File

@ -94,7 +94,7 @@ jobs:
- name: Test wallet code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/wallet/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/wallet/test_bech32m.py tests/wallet/test_chialisp.py tests/wallet/test_puzzle_store.py tests/wallet/test_singleton.py tests/wallet/test_singleton_lifecycle.py tests/wallet/test_singleton_lifecycle_fast.py tests/wallet/test_taproot.py tests/wallet/test_wallet.py tests/wallet/test_wallet_blockchain.py tests/wallet/test_wallet_interested_store.py tests/wallet/test_wallet_key_val_store.py tests/wallet/test_wallet_store.py tests/wallet/test_wallet_user_store.py
- name: Process coverage data
run: |

View File

@ -94,7 +94,7 @@ jobs:
- name: Test weight_proof code with pytest
run: |
. ./activate
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/weight_proof/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor
venv/bin/coverage run --rcfile=.coveragerc --module pytest --durations=10 -n 4 -m "not benchmark" -p no:monitor tests/weight_proof/test_weight_proof.py
- name: Process coverage data
run: |

View File

@ -22,6 +22,10 @@ jobs:
name: Windows 10 Installer
runs-on: [windows-2019]
timeout-minutes: 50
strategy:
fail-fast: false
matrix:
python-version: [3.9]
steps:
- name: Checkout Code
@ -60,11 +64,10 @@ jobs:
restore-keys: |
${{ runner.os }}-pip-
# We can't upgrade to Python 3.8 until we have a miniupnpc binary
- uses: actions/setup-python@v2
name: Install Python 3.9
name: Install Python ${{ matrix.python-version }}
with:
python-version: "3.9"
python-version: ${{ matrix.python-version }}
- name: Setup Node 16.x
uses: actions/setup-node@v3
@ -145,6 +148,12 @@ jobs:
Expand-Archive -Path "$env:GITHUB_WORKSPACE\bladebit\bladebit.zip" -DestinationPath "$env:GITHUB_WORKSPACE\bladebit\"
rm "$env:GITHUB_WORKSPACE\bladebit\bladebit.zip"
- name: Run install script
env:
INSTALL_PYTHON_VERSION: ${{ matrix.python-version }}
run: |
.\Install.ps1 -d
- name: Build Windows installer with build_scripts\build_windows.ps1
env:
CHIA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}
@ -154,18 +163,20 @@ jobs:
$env:path="C:\Program` Files` (x86)\Microsoft` Visual` Studio\2019\Enterprise\SDK\ScopeCppSDK\vc15\VC\bin\;$env:path"
$env:path="C:\Program` Files` (x86)\Windows` Kits\10\App` Certification` Kit;$env:path"
git -C .\chia-blockchain-gui status
.\build_scripts\build_windows.ps1
.\venv\Scripts\Activate.ps1
cd .\build_scripts
.\build_windows.ps1
- name: Upload Windows exe's to artifacts
uses: actions/upload-artifact@v3
with:
name: chia-installers-windows-exe-intel
name: Windows-Exe
path: ${{ github.workspace }}\chia-blockchain-gui\Chia-win32-x64\
- name: Upload Installer to artifacts
uses: actions/upload-artifact@v3
with:
name: Windows-Installers
name: chia-installers-windows-exe-intel
path: ${{ github.workspace }}\chia-blockchain-gui\release-builds\
- name: Install AWS CLI

View File

@ -3,7 +3,9 @@ name: Check Dependency Artifacts
on:
push:
branches:
- 'long_lived/**'
- main
- 'release/**'
tags:
- '**'
pull_request:
@ -33,6 +35,10 @@ jobs:
runs-on:
intel: macos-latest
arm: [macos, arm64]
- name: Windows
matrix: windows
runs-on:
intel: windows-latest
arch:
- name: ARM64
matrix: arm
@ -48,6 +54,10 @@ jobs:
arch:
matrix: arm
python-version: '3.8'
- os:
matrix: windows
arch:
matrix: arm
steps:
- uses: Chia-Network/actions/clean-workspace@main

.github/workflows/conflict-check.yml vendored Normal file
View File

@ -0,0 +1,21 @@
name: "Conflict Check"
on:
# So that PRs touching the same files as the push are updated
push:
# So that the `dirtyLabel` is removed if conflicts are resolved
# We recommend `pull_request_target` so that github secrets are available.
# In `pull_request` we wouldn't be able to change labels of fork PRs
pull_request_target:
types: [synchronize]
jobs:
main:
runs-on: ubuntu-latest
steps:
- name: check if PRs are behind main
uses: eps1lon/actions-label-merge-conflict@v2.0.0
with:
dirtyLabel: "merge_conflict"
repoToken: "${{ secrets.GITHUB_TOKEN }}"
commentOnDirty: "This pull request has conflicts, please resolve those before we can evaluate the pull request."
commentOnClean: "Conflicts have been resolved. A maintainer will review the pull request shortly."

View File

@ -4,9 +4,6 @@ profile=black
skip_gitignore=true
# venv/bin/isort --check . |& sed -n "s;ERROR: ${PWD}/\(.*\) Imports are.*; \1;p" | sort | uniq
extend_skip=
benchmarks/block_store.py
benchmarks/coin_store.py
benchmarks/utils.py
chia/clvm/spend_sim.py
chia/cmds/chia.py
chia/cmds/data_funcs.py
@ -41,8 +38,6 @@ extend_skip=
chia/data_layer/data_store.py
chia/data_layer/dl_wallet_store.py
chia/data_layer/download_data.py
chia/farmer/farmer_api.py
chia/farmer/farmer.py
chia/full_node/block_height_map.py
chia/full_node/block_store.py
chia/full_node/bundle_tools.py
@ -55,26 +50,18 @@ extend_skip=
chia/full_node/mempool_check_conditions.py
chia/full_node/mempool_manager.py
chia/full_node/weight_proof.py
chia/harvester/harvester_api.py
chia/harvester/harvester.py
chia/introducer/introducer.py
chia/plotters/bladebit.py
chia/plotters/chiapos.py
chia/plotters/install_plotter.py
chia/plotters/madmax.py
chia/plotters/plotters.py
chia/plotting/check_plots.py
chia/plotting/create_plots.py
chia/plotting/manager.py
chia/plotting/util.py
chia/pools/pool_puzzles.py
chia/pools/pool_wallet_info.py
chia/pools/pool_wallet.py
chia/protocols/harvester_protocol.py
chia/protocols/pool_protocol.py
chia/protocols/protocol_state_machine.py
chia/rpc/data_layer_rpc_api.py
chia/rpc/farmer_rpc_client.py
chia/rpc/full_node_rpc_client.py
chia/rpc/rpc_client.py
chia/rpc/wallet_rpc_api.py
@ -161,7 +148,6 @@ extend_skip=
chia/wallet/wallet.py
chia/wallet/wallet_state_manager.py
chia/wallet/wallet_weight_proof_handler.py
installhelper.py
tests/blockchain/test_blockchain.py
tests/blockchain/test_blockchain_transactions.py
tests/block_tools.py
@ -215,8 +201,6 @@ extend_skip=
tests/generator/test_list_to_batches.py
tests/generator/test_rom.py
tests/generator/test_scan.py
tests/plotting/test_plot_manager.py
tests/plotting/util.py
tests/pools/test_pool_cmdline.py
tests/pools/test_pool_config.py
tests/pools/test_pool_puzzles_lifecycle.py
@ -244,7 +228,6 @@ extend_skip=
tests/wallet/did_wallet/test_did.py
tests/wallet/did_wallet/test_did_rpc.py
tests/wallet/rpc/test_dl_wallet_rpc.py
tests/wallet/rpc/test_wallet_rpc.py
tests/wallet/simple_sync/test_simple_sync_protocol.py
tests/wallet/test_singleton_lifecycle_fast.py
tests/wallet/test_singleton_lifecycle.py
@ -256,4 +239,3 @@ extend_skip=
tests/wallet_tools.py
tests/weight_proof/test_weight_proof.py
tools/analyze-chain.py
tools/run_block.py

View File

@ -60,7 +60,7 @@ repos:
hooks:
- id: flake8
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v0.942
rev: v0.950
hooks:
- id: mypy
additional_dependencies: [filelock, pytest, pytest-asyncio, types-aiofiles, types-click, types-setuptools, types-PyYAML]

View File

@ -37,21 +37,23 @@ In the event an emergency fix is required for the release version of Chia, membe
4. Chia Network's nodes on the public testnet are running the latest version `x.y.z`.
5. The `main` branch will have a long running `beta testnet` to allow previewing of changes.
6. Pull Request events may require a `beta testnet` review environment. At the moment this is at the discretion of the reviewer.
7. Hotfixes land in the release branch they fix, and all later versions. (This will be achieved by regularly merging from `1.0.x` to main).
7. Hotfixes land in the release branch they fix, and all later versions. (This will be achieved by regularly merging from `1.3.x` to main).
8. Hotfixes that are emergency fixes for a specific version will be merged into (???), and removed from down-stream branches. This allows future merges without issues.
9. Whoever develops a hotfix is also responsible for merging it into all later branches.
10. A release branch (e.g. `1.1.x`) will be cut prior to a release, in order to separate work that should go into the release from work going into the next major release (main branch). (This pre-release branch will also have a `beta testnet` spun up for preview).
10. A release branch (e.g. `1.3.x`) will be cut prior to a release, in order to separate work that should go into the release from work going into the next major release (main branch). (This pre-release branch will also have a `beta testnet` spun up for preview).
11. All Merge events will be squash merged.
## Run tests and linting
The first time the tests are run, BlockTools will create and persist many plots. These are used for creating
proofs of space during testing. The next time tests are run, this will not be necessary.
proofs of space during testing. The next time tests are run, this will not be necessary. Look at the pytest.ini file
to configure how the tests are run. For example, for more logging: change the log level from WARNING to INFO, change
`-n auto` to `-n 0` and set `log_cli = True`.
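As a concrete illustration, a hypothetical `pytest.ini` fragment with those tweaks applied might look like the following (`log_cli` and `log_level` are standard pytest options and `-n` comes from pytest-xdist via `addopts`; the stock values noted in comments are assumptions, not this repository's exact file):
```ini
[pytest]
# enable live log output while the tests run
log_cli = True
# assumed stock value: WARNING
log_level = INFO
# assumed stock value: -n auto; -n 0 disables parallel xdist workers
addopts = --durations=10 -n 0
```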
```bash
sh install.sh -d
. ./activate
pip install ".[dev]"
black chia tests && mypy chia tests && flake8 chia tests
black . && isort benchmarks build_scripts chia tests tools *.py && mypy && flake8 benchmarks build_scripts chia tests tools *.py
py.test tests -v --durations 0
```
@ -86,20 +88,18 @@ provided configuration with `pre-commit install`.
Pycharm is an amazing and beautiful python IDE that some of us use to work on this project.
If you combine it with python black and formatting on save, you will get a very efficient
workflow.
workflow. It's also especially efficient for git branching, cherry-picking, committing and pushing.
1. pip install black
2. Run blackd in a terminal
3. Install BlackConnect plugin
4. Set to run python black on save
5. Set line length to 120
6. Install the linters in the root directory
1. Run blackd in a terminal
2. Install BlackConnect plugin
3. Set to run python black on save
4. Set line length to 120
5. Install the linters in the root directory
## Testnets and review environments
With the launch of `1.0.0` we will begin running an official `testnet`.
Prior to the release of `1.1.0` there will be two running test nets. `testnet` and `transaction-beta-testnet`. The `transaction-beta-testnet` testnet will be a beta of the pending 1.1 release, which will enable transactions on the chia blockchain.
Following the release of `1.1.0`, the official `testnet` will include all changes that have been accepted to the current release branch.
The current official testnet is testnet10. Look at `chia/util/initial_config.yaml` to see the configuration parameters
for each testnet. Information on how to use the testnet can be found in the wiki.
Prior to proposing changes to `main`, proposers should consider if running a `beta testnet` review environment will make the reviewer more effective when evaluating a change.
Changes that impact the blockchain could require a review environment before acceptance into `main`. This is at the discretion of the reviewer.

View File

@ -72,6 +72,7 @@ async def main(db_path: Path):
)
peak = blockchain.get_peak()
assert peak is not None
timing = 0.0
for i in range(REPETITIONS):
block = BlockInfo(

View File

@ -1,35 +1,35 @@
import asyncio
import random
from time import monotonic
from pathlib import Path
from chia.full_node.block_store import BlockStore
import os
import random
import sys
from pathlib import Path
from time import monotonic
from benchmarks.utils import clvm_generator
from chia.util.db_wrapper import DBWrapper2
from chia.util.ints import uint128, uint64, uint32, uint8
from utils import (
rewards,
rand_hash,
setup_db,
rand_bytes,
rand_class_group_element,
rand_g1,
rand_g2,
rand_bytes,
rand_hash,
rand_vdf,
rand_vdf_proof,
rand_class_group_element,
rewards,
setup_db,
)
from chia.types.full_block import FullBlock
from benchmarks.utils import clvm_generator
from chia.consensus.block_record import BlockRecord
from chia.full_node.block_store import BlockStore
from chia.types.blockchain_format.foliage import Foliage, FoliageBlockData, FoliageTransactionBlock, TransactionsInfo
from chia.types.blockchain_format.pool_target import PoolTarget
from chia.types.blockchain_format.program import SerializedProgram
from chia.types.blockchain_format.proof_of_space import ProofOfSpace
from chia.types.blockchain_format.reward_chain_block import RewardChainBlock
from chia.types.blockchain_format.pool_target import PoolTarget
from chia.types.blockchain_format.foliage import Foliage, FoliageTransactionBlock, TransactionsInfo, FoliageBlockData
from chia.types.blockchain_format.program import SerializedProgram
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.blockchain_format.sub_epoch_summary import SubEpochSummary
from chia.types.full_block import FullBlock
from chia.util.db_wrapper import DBWrapper2
from chia.util.ints import uint8, uint32, uint64, uint128
NUM_ITERS = 20000

View File

@ -1,17 +1,18 @@
import asyncio
import random
from time import monotonic
from pathlib import Path
from chia.full_node.coin_store import CoinStore
from typing import List, Tuple
import os
import random
import sys
from pathlib import Path
from time import monotonic
from typing import List, Tuple
from chia.util.db_wrapper import DBWrapper2
from chia.types.blockchain_format.sized_bytes import bytes32
from utils import rand_hash, rewards, setup_db
from chia.full_node.coin_store import CoinStore
from chia.types.blockchain_format.coin import Coin
from chia.util.ints import uint64, uint32
from utils import rewards, rand_hash, setup_db
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.db_wrapper import DBWrapper2
from chia.util.ints import uint32, uint64
NUM_ITERS = 200

View File

@ -1,28 +1,29 @@
from chia.consensus.default_constants import DEFAULT_CONSTANTS
from chia.util.ints import uint64, uint32, uint8
import os
import random
import subprocess
import sys
from datetime import datetime
from pathlib import Path
from typing import Tuple
import aiosqlite
import click
from blspy import AugSchemeMPL, G1Element, G2Element
from chia.consensus.coinbase import create_farmer_coin, create_pool_coin
from chia.consensus.default_constants import DEFAULT_CONSTANTS
from chia.types.blockchain_format.classgroup import ClassgroupElement
from chia.types.blockchain_format.coin import Coin
from chia.types.blockchain_format.sized_bytes import bytes32, bytes100
from chia.types.blockchain_format.vdf import VDFInfo, VDFProof
from chia.types.blockchain_format.foliage import Foliage, FoliageBlockData, FoliageTransactionBlock, TransactionsInfo
from chia.types.blockchain_format.pool_target import PoolTarget
from chia.types.blockchain_format.program import SerializedProgram
from chia.types.blockchain_format.proof_of_space import ProofOfSpace
from chia.types.blockchain_format.reward_chain_block import RewardChainBlock
from chia.types.blockchain_format.sized_bytes import bytes32, bytes100
from chia.types.blockchain_format.vdf import VDFInfo, VDFProof
from chia.types.full_block import FullBlock
from chia.util.ints import uint128
from chia.util.db_wrapper import DBWrapper2
from typing import Tuple
from pathlib import Path
from datetime import datetime
import aiosqlite
import click
import os
import subprocess
import sys
import random
from blspy import G2Element, G1Element, AugSchemeMPL
from chia.util.ints import uint8, uint32, uint64, uint128
# farmer puzzle hash
ph = bytes32(b"a" * 32)

View File

@ -3,78 +3,16 @@
$ErrorActionPreference = "Stop"
mkdir build_scripts\win_build
Set-Location -Path ".\build_scripts\win_build" -PassThru
git status
Write-Output " ---"
Write-Output "curl miniupnpc"
Write-Output " ---"
# download.chia.net is the CDN url behind all the files that are actually on pypi.chia.net/simple now
Invoke-WebRequest -Uri "https://download.chia.net/simple/miniupnpc/miniupnpc-2.2.2-cp39-cp39-win_amd64.whl" -OutFile "miniupnpc-2.2.2-cp39-cp39-win_amd64.whl"
Write-Output "Using win_amd64 python 3.9 wheel from https://github.com/miniupnp/miniupnp/pull/475 (2.2.0-RC1)"
Write-Output "Actual build from https://github.com/miniupnp/miniupnp/commit/7783ac1545f70e3341da5866069bde88244dd848"
If ($LastExitCode -gt 0){
Throw "Failed to download miniupnpc!"
}
else
{
Set-Location -Path - -PassThru
Write-Output "miniupnpc download successful."
}
Write-Output " ---"
Write-Output "Create venv - python3.9 is required in PATH"
Write-Output " ---"
python -m venv venv
. .\venv\Scripts\Activate.ps1
python -m pip install --upgrade pip
pip install wheel pep517
pip install pywin32
pip install pyinstaller==4.9
Write-Output " ---"
# The environment variable CHIA_INSTALLER_VERSION needs to be defined
if (-not (Test-Path env:CHIA_INSTALLER_VERSION)) {
$env:CHIA_INSTALLER_VERSION = '0.0.0'
Write-Output "WARNING: No environment variable CHIA_INSTALLER_VERSION set. Using 0.0.0"
}
}
Write-Output "Chia Version is: $env:CHIA_INSTALLER_VERSION"
Write-Output " ---"
Write-Output "Checking if madmax exists"
Write-Output " ---"
if (Test-Path -Path .\madmax\) {
Write-Output " madmax exists, moving to expected directory"
mv .\madmax\ .\venv\lib\site-packages\
}
Write-Output "Checking if bladebit exists"
Write-Output " ---"
if (Test-Path -Path .\bladebit\) {
Write-Output " bladebit exists, moving to expected directory"
mv .\bladebit\ .\venv\lib\site-packages\
}
Write-Output " ---"
Write-Output "Build chia-blockchain wheels"
Write-Output " ---"
pip wheel --use-pep517 --extra-index-url https://pypi.chia.net/simple/ -f . --wheel-dir=.\build_scripts\win_build .
Write-Output " ---"
Write-Output "Install chia-blockchain wheels into venv with pip"
Write-Output " ---"
Write-Output "pip install miniupnpc"
Set-Location -Path ".\build_scripts" -PassThru
pip install --no-index --find-links=.\win_build\ miniupnpc
# Write-Output "pip install setproctitle"
# pip install setproctitle==1.2.2
Write-Output "pip install chia-blockchain"
pip install --no-index --find-links=.\win_build\ chia-blockchain
Write-Output " ---"
Write-Output "Use pyinstaller to create chia .exe's"
Write-Output " ---"

View File

@ -134,7 +134,7 @@ class SpendSim:
await self.db_wrapper.close()
async def new_peak(self):
await self.mempool_manager.new_peak(self.block_records[-1], [])
await self.mempool_manager.new_peak(self.block_records[-1], None)
def new_coin_record(self, coin: Coin, coinbase=False) -> CoinRecord:
return CoinRecord(

chia/cmds/cmds_util.py Normal file
View File

@ -0,0 +1,12 @@
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.mempool_submission_status import MempoolSubmissionStatus
from chia.wallet.transaction_record import TransactionRecord
def transaction_submitted_msg(tx: TransactionRecord) -> str:
sent_to = [MempoolSubmissionStatus(s[0], s[1], s[2]).to_json_dict_convenience() for s in tx.sent_to]
return f"Transaction submitted to nodes: {sent_to}"
def transaction_status_msg(fingerprint: int, tx_id: bytes32) -> str:
return f"Run 'chia wallet get_transaction -f {fingerprint} -tx 0x{tx_id}' to get status"

View File

@ -439,12 +439,9 @@ def chia_init(
protected Keychain. When launching the daemon from the GUI, we want the GUI to
handle unlocking the keychain.
"""
if os.environ.get("CHIA_ROOT", None) is not None:
print(
f"warning, your CHIA_ROOT is set to {os.environ['CHIA_ROOT']}. "
f"Please unset the environment variable and run chia init again\n"
f"or manually migrate config.yaml"
)
chia_root = os.environ.get("CHIA_ROOT", None)
if chia_root is not None:
print(f"CHIA_ROOT is set to {chia_root}")
print(f"Chia directory {root_path}")
if root_path.is_dir() and Path(root_path / "config" / "config.yaml").exists():

View File

@ -1,4 +1,3 @@
from collections import Counter
from decimal import Decimal
from dataclasses import replace
@ -26,6 +25,7 @@ from chia.util.byte_types import hexstr_to_bytes
from chia.util.config import load_config
from chia.util.default_root import DEFAULT_ROOT_PATH
from chia.util.ints import uint16, uint32, uint64
from chia.cmds.cmds_util import transaction_submitted_msg, transaction_status_msg
from chia.wallet.transaction_record import TransactionRecord
from chia.wallet.util.wallet_types import WalletType
@ -100,8 +100,8 @@ async def create(args: dict, wallet_client: WalletRpcClient, fingerprint: int) -
await asyncio.sleep(0.1)
tx = await wallet_client.get_transaction(str(1), tx_record.name)
if len(tx.sent_to) > 0:
print(f"Transaction submitted to nodes: {tx.sent_to}")
print(f"Do chia wallet get_transaction -f {fingerprint} -tx 0x{tx_record.name} to get status")
print(transaction_submitted_msg(tx))
print(transaction_status_msg(fingerprint, tx_record.name))
return None
except Exception as e:
print(f"Error creating plot NFT: {e}\n Please start both farmer and wallet with: chia start -r farmer")
@ -115,7 +115,6 @@ async def pprint_pool_wallet_state(
pool_wallet_info: PoolWalletInfo,
address_prefix: str,
pool_state_dict: Dict,
plot_counts: Counter,
):
if pool_wallet_info.current.state == PoolSingletonState.LEAVING_POOL and pool_wallet_info.target is None:
expected_leave_height = pool_wallet_info.singleton_block_height + pool_wallet_info.current.relative_lock_height
@ -128,7 +127,7 @@ async def pprint_pool_wallet_state(
"Target address (not for plotting): "
f"{encode_puzzle_hash(pool_wallet_info.current.target_puzzle_hash, address_prefix)}"
)
print(f"Number of plots: {plot_counts[pool_wallet_info.p2_singleton_puzzle_hash]}")
print(f"Number of plots: {pool_state_dict[pool_wallet_info.launcher_id]['plot_count']}")
print(f"Owner public key: {pool_wallet_info.current.owner_pubkey}")
print(
@ -182,15 +181,8 @@ async def show(args: dict, wallet_client: WalletRpcClient, fingerprint: int) ->
address_prefix = config["network_overrides"]["config"][config["selected_network"]]["address_prefix"]
summaries_response = await wallet_client.get_wallets()
wallet_id_passed_in = args.get("id", None)
plot_counts: Counter = Counter()
try:
pool_state_list: List = (await farmer_client.get_pool_state())["pool_state"]
harvesters = await farmer_client.get_harvesters()
for d in harvesters["harvesters"]:
for plot in d["plots"]:
if plot.get("pool_contract_puzzle_hash", None) is not None:
# Non pooled plots will have a None pool_contract_puzzle_hash
plot_counts[hexstr_to_bytes(plot["pool_contract_puzzle_hash"])] += 1
pool_state_list = (await farmer_client.get_pool_state())["pool_state"]
except Exception as e:
if isinstance(e, aiohttp.ClientConnectorError):
print(
@ -219,7 +211,6 @@ async def show(args: dict, wallet_client: WalletRpcClient, fingerprint: int) ->
pool_wallet_info,
address_prefix,
pool_state_dict,
plot_counts,
)
else:
print(f"Wallet height: {await wallet_client.get_height_info()}")
@ -236,7 +227,6 @@ async def show(args: dict, wallet_client: WalletRpcClient, fingerprint: int) ->
pool_wallet_info,
address_prefix,
pool_state_dict,
plot_counts,
)
print("")
farmer_client.close()
@ -286,8 +276,8 @@ async def submit_tx_with_confirmation(
await asyncio.sleep(0.1)
tx = await wallet_client.get_transaction(str(1), tx_record.name)
if len(tx.sent_to) > 0:
print(f"Transaction submitted to nodes: {tx.sent_to}")
print(f"Do chia wallet get_transaction -f {fingerprint} -tx 0x{tx_record.name} to get status")
print(transaction_submitted_msg(tx))
print(transaction_status_msg(fingerprint, tx_record.name))
return None
except Exception as e:
print(f"Error performing operation on Plot NFT -f {fingerprint} wallet id: {wallet_id}: {e}")

View File

@ -62,9 +62,7 @@ async def async_start(root_path: Path, group: str, restart: bool) -> None:
if await daemon.is_running(service_name=service):
print(f"{service}: ", end="", flush=True)
if restart:
if not await daemon.is_running(service_name=service):
print("not running")
elif await daemon.stop_service(service_name=service):
if await daemon.stop_service(service_name=service):
print("stopped")
else:
print("stop failed")

View File

@ -17,7 +17,10 @@ async def async_stop(root_path: Path, group: str, stop_daemon: bool) -> int:
if stop_daemon:
r = await daemon.exit()
await daemon.close()
print(f"daemon: {r}")
if r.get("data", {}).get("success", False):
print("Daemon stopped")
else:
print(f"Stop daemon failed {r}")
return 0
return_val = 0
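
The new branch assumes the daemon's exit reply is a dict with a nested data.success flag; a self-contained sketch of that parse, using a fabricated reply:

r = {"data": {"success": True}}  # illustrative reply shape, not captured from a daemon
if r.get("data", {}).get("success", False):
    print("Daemon stopped")
else:
    print(f"Stop daemon failed {r}")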

View File

@ -4,6 +4,7 @@ from typing import Any, Dict, Optional, Tuple
import click
from chia.wallet.util.wallet_types import WalletType
from chia.wallet.transaction_sorting import SortKey
@click.group("wallet", short_help="Manage your wallet")
@ -65,6 +66,25 @@ def get_transaction_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: in
default=None,
help="Prompt for each page of data. Defaults to true for interactive consoles, otherwise false.",
)
@click.option(
"--sort-by-height",
"sort_key",
flag_value=SortKey.CONFIRMED_AT_HEIGHT,
help="Sort transactions by height",
)
@click.option(
"--sort-by-relevance",
"sort_key",
flag_value=SortKey.RELEVANCE,
default=True,
help="Sort transactions by {confirmed, height, time}",
)
@click.option(
"--reverse",
is_flag=True,
default=False,
help="Reverse the transaction ordering",
)
def get_transactions_cmd(
wallet_rpc_port: Optional[int],
fingerprint: int,
@ -73,8 +93,19 @@ def get_transactions_cmd(
limit: int,
verbose: bool,
paginate: Optional[bool],
sort_key: SortKey,
reverse: bool,
) -> None:
extra_params = {"id": id, "verbose": verbose, "offset": offset, "paginate": paginate, "limit": limit}
extra_params = {
"id": id,
"verbose": verbose,
"offset": offset,
"paginate": paginate,
"limit": limit,
"sort_key": sort_key,
"reverse": reverse,
}
import asyncio
from .wallet_funcs import execute_with_wallet, get_transactions
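
The two sort flags write into a single sort_key parameter via click's flag_value mechanism, with --sort-by-relevance as the default. A standalone sketch of the pattern (the enum here is a stand-in, not chia's SortKey):

import enum
import click

class SortKey(enum.Enum):
    CONFIRMED_AT_HEIGHT = "confirmed_at_height"
    RELEVANCE = "relevance"

@click.command()
@click.option("--sort-by-height", "sort_key", flag_value=SortKey.CONFIRMED_AT_HEIGHT)
@click.option("--sort-by-relevance", "sort_key", flag_value=SortKey.RELEVANCE, default=True)
@click.option("--reverse", is_flag=True, default=False)
def demo(sort_key: SortKey, reverse: bool) -> None:
    # Exactly one of the two flag values ends up in sort_key
    click.echo(f"sort_key={sort_key} reverse={reverse}")

if __name__ == "__main__":
    demo()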

View File

@ -18,15 +18,31 @@ from chia.util.bech32m import encode_puzzle_hash
from chia.util.config import load_config
from chia.util.default_root import DEFAULT_ROOT_PATH
from chia.util.ints import uint16, uint32, uint64
from chia.cmds.cmds_util import transaction_submitted_msg, transaction_status_msg
from chia.wallet.trade_record import TradeRecord
from chia.wallet.trading.offer import Offer
from chia.wallet.trading.trade_status import TradeStatus
from chia.wallet.transaction_record import TransactionRecord
from chia.wallet.util.transaction_type import TransactionType
from chia.wallet.util.wallet_types import WalletType
CATNameResolver = Callable[[bytes32], Awaitable[Optional[Tuple[Optional[uint32], str]]]]
transaction_type_descriptions = {
TransactionType.INCOMING_TX: "received",
TransactionType.OUTGOING_TX: "sent",
TransactionType.COINBASE_REWARD: "rewarded",
TransactionType.FEE_REWARD: "rewarded",
TransactionType.INCOMING_TRADE: "received in trade",
TransactionType.OUTGOING_TRADE: "sent in trade",
}
def transaction_description_from_type(tx: TransactionRecord) -> str:
return transaction_type_descriptions.get(TransactionType(tx.type), "(unknown reason)")
def print_transaction(tx: TransactionRecord, verbose: bool, name, address_prefix: str, mojo_per_unit: int) -> None:
if verbose:
print(tx)
@ -35,7 +51,8 @@ def print_transaction(tx: TransactionRecord, verbose: bool, name, address_prefix
to_address = encode_puzzle_hash(tx.to_puzzle_hash, address_prefix)
print(f"Transaction {tx.name}")
print(f"Status: {'Confirmed' if tx.confirmed else ('In mempool' if tx.is_in_mempool() else 'Pending')}")
print(f"Amount {'sent' if tx.sent else 'received'}: {chia_amount} {name}")
description = transaction_description_from_type(tx)
print(f"Amount {description}: {chia_amount} {name}")
print(f"To address: {to_address}")
print("Created at:", datetime.fromtimestamp(tx.created_at_time).strftime("%Y-%m-%d %H:%M:%S"))
print("")
@ -115,9 +132,13 @@ async def get_transactions(args: dict, wallet_client: WalletRpcClient, fingerpri
paginate = sys.stdout.isatty()
offset = args["offset"]
limit = args["limit"]
sort_key = args["sort_key"]
reverse = args["reverse"]
txs: List[TransactionRecord] = await wallet_client.get_transactions(
wallet_id, start=offset, end=(offset + limit), reverse=True
wallet_id, start=offset, end=(offset + limit), sort_key=sort_key, reverse=reverse
)
config = load_config(DEFAULT_ROOT_PATH, "config.yaml", SERVICE_NAME)
address_prefix = config["network_overrides"]["config"][config["selected_network"]]["address_prefix"]
if len(txs) == 0:
@ -208,8 +229,8 @@ async def send(args: dict, wallet_client: WalletRpcClient, fingerprint: int) ->
await asyncio.sleep(0.1)
tx = await wallet_client.get_transaction(str(wallet_id), tx_id)
if len(tx.sent_to) > 0:
print(f"Transaction submitted to nodes: {tx.sent_to}")
print(f"Do chia wallet get_transaction -f {fingerprint} -tx 0x{tx_id} to get status")
print(transaction_submitted_msg(tx))
print(transaction_status_msg(fingerprint, tx_id))
return None
print("Transaction not yet submitted to nodes")

View File

@ -44,7 +44,7 @@ async def validate_block_body(
fork_point_with_peak: Optional[uint32],
get_block_generator: Callable[[BlockInfo], Awaitable[Optional[BlockGenerator]]],
*,
validate_signature=True,
validate_signature: bool = True,
) -> Tuple[Optional[Err], Optional[NPCResult]]:
"""
This assumes the header block has been completely validated.
@ -152,7 +152,7 @@ async def validate_block_body(
removals_puzzle_dic: Dict[bytes32, bytes32] = {}
cost: uint64 = uint64(0)
# In header validation we check that timestamp is not more that 5 minutes into the future
# In header validation we check that timestamp is not more than 5 minutes into the future
# 6. No transactions before INITIAL_TRANSACTION_FREEZE timestamp
# (this test has been removed)
@ -340,6 +340,9 @@ async def validate_block_body(
assert curr is not None
removal_coin_records: Dict[bytes32, CoinRecord] = {}
# the removed coins we need to look up from the DB
# i.e. all non-ephemeral coins
removals_from_db: List[bytes32] = []
for rem in removals:
if rem in additions_dic:
# Ephemeral coin
@ -353,35 +356,53 @@ async def validate_block_body(
)
removal_coin_records[new_unspent.name] = new_unspent
else:
unspent = await coin_store.get_coin_record(rem)
if unspent is not None and unspent.confirmed_block_index <= fork_h:
# Spending something in the current chain, confirmed before fork
# (We ignore all coins confirmed after fork)
if unspent.spent == 1 and unspent.spent_block_index <= fork_h:
# Check for coins spent in an ancestor block
return Err.DOUBLE_SPEND, None
removal_coin_records[unspent.name] = unspent
else:
# This coin is not in the current heaviest chain, so it must be in the fork
if rem not in additions_since_fork:
# Check for spending a coin that does not exist in this fork
log.error(f"Err.UNKNOWN_UNSPENT: COIN ID: {rem} NPC RESULT: {npc_result}")
return Err.UNKNOWN_UNSPENT, None
new_coin, confirmed_height, confirmed_timestamp = additions_since_fork[rem]
new_coin_record: CoinRecord = CoinRecord(
new_coin,
confirmed_height,
uint32(0),
False,
confirmed_timestamp,
)
removal_coin_records[new_coin_record.name] = new_coin_record
# This check applies to both coins created before fork (pulled from coin_store),
# and coins created after fork (additions_since_fork)
if rem in removals_since_fork:
# This coin was spent in the fork
return Err.DOUBLE_SPEND_IN_FORK, None
removals_from_db.append(rem)
unspent_records = await coin_store.get_coin_records(removals_from_db)
# Some coin spends must be checked against the fork branch: coins we
# can't find in the DB, and coins that were confirmed after the fork point
look_in_fork: List[bytes32] = []
for unspent in unspent_records:
if unspent.confirmed_block_index <= fork_h:
# Spending something in the current chain, confirmed before fork
# (We ignore all coins confirmed after fork)
if unspent.spent == 1 and unspent.spent_block_index <= fork_h:
# Check for coins spent in an ancestor block
return Err.DOUBLE_SPEND, None
removal_coin_records[unspent.name] = unspent
else:
look_in_fork.append(unspent.name)
if len(unspent_records) != len(removals_from_db):
# some coins could not be found in the DB. We need to find out which
# ones and look for them in additions_since_fork
found: Set[bytes32] = set([u.name for u in unspent_records])
for rem in removals_from_db:
if rem in found:
continue
look_in_fork.append(rem)
for rem in look_in_fork:
# This coin is not in the current heaviest chain, so it must be in the fork
if rem not in additions_since_fork:
# Check for spending a coin that does not exist in this fork
log.error(f"Err.UNKNOWN_UNSPENT: COIN ID: {rem} NPC RESULT: {npc_result}")
return Err.UNKNOWN_UNSPENT, None
new_coin, confirmed_height, confirmed_timestamp = additions_since_fork[rem]
new_coin_record: CoinRecord = CoinRecord(
new_coin,
confirmed_height,
uint32(0),
False,
confirmed_timestamp,
)
removal_coin_records[new_coin_record.name] = new_coin_record
removed = 0
for unspent in removal_coin_records.values():
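
The restructuring above replaces one coin_store.get_coin_record() call per removal with a single batched get_coin_records() call, then works out which coins still need a fork-branch lookup. A self-contained sketch of that classification step, with a plain dict standing in for the coin store:

from typing import Dict, List, Optional, Set

def coins_to_check_in_fork(
    removals_from_db: List[bytes],
    db_heights: Dict[bytes, int],  # stand-in: coin id -> confirmed_block_index
    fork_h: int,
) -> List[bytes]:
    look_in_fork: List[bytes] = []
    found: Set[bytes] = set()
    for coin_id in removals_from_db:
        height: Optional[int] = db_heights.get(coin_id)
        if height is None:
            continue  # missing from the DB; handled by the not-found pass below
        found.add(coin_id)
        if height > fork_h:
            look_in_fork.append(coin_id)  # confirmed after the fork point
    # Coins the batch query could not find at all must also come from the fork branch
    look_in_fork.extend(c for c in removals_from_db if c not in found)
    return look_in_fork

print(coins_to_check_in_fork([b"a", b"b", b"c"], {b"a": 5, b"b": 20}, fork_h=10))
# [b'b', b'c']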

View File

@ -5,6 +5,7 @@ from typing import Callable, Dict, List, Optional, Tuple
import blspy
from blspy import G1Element, G2Element
from chia_rs import compute_merkle_set_root
from chiabip158 import PyBIP158
from chia.consensus.block_record import BlockRecord
@ -15,7 +16,7 @@ from chia.consensus.constants import ConsensusConstants
from chia.consensus.cost_calculator import NPCResult
from chia.full_node.mempool_check_conditions import get_name_puzzle_conditions
from chia.full_node.signage_point import SignagePoint
from chia.types.blockchain_format.coin import Coin, hash_coin_list
from chia.types.blockchain_format.coin import Coin, hash_coin_ids
from chia.types.blockchain_format.foliage import Foliage, FoliageBlockData, FoliageTransactionBlock, TransactionsInfo
from chia.types.blockchain_format.pool_target import PoolTarget
from chia.types.blockchain_format.proof_of_space import ProofOfSpace
@ -28,7 +29,6 @@ from chia.types.generator_types import BlockGenerator
from chia.types.unfinished_block import UnfinishedBlock
from chia.util.hash import std_hash
from chia.util.ints import uint8, uint32, uint64, uint128
from chia.util.merkle_set import MerkleSet
from chia.util.prev_transaction_block import get_prev_transaction_block
from chia.util.recursive_replace import recursive_replace
@ -191,35 +191,31 @@ def create_foliage(
tx_additions.append(coin)
byte_array_tx.append(bytearray(coin.puzzle_hash))
for coin in removals:
tx_removals.append(coin.name())
byte_array_tx.append(bytearray(coin.name()))
cname = coin.name()
tx_removals.append(cname)
byte_array_tx.append(bytearray(cname))
bip158: PyBIP158 = PyBIP158(byte_array_tx)
encoded = bytes(bip158.GetEncoded())
removal_merkle_set = MerkleSet()
addition_merkle_set = MerkleSet()
# Create removal Merkle set
for coin_name in tx_removals:
removal_merkle_set.add_already_hashed(coin_name)
additions_merkle_items: List[bytes32] = []
# Create addition Merkle set
puzzlehash_coin_map: Dict[bytes32, List[Coin]] = {}
puzzlehash_coin_map: Dict[bytes32, List[bytes32]] = {}
for coin in tx_additions:
if coin.puzzle_hash in puzzlehash_coin_map:
puzzlehash_coin_map[coin.puzzle_hash].append(coin)
puzzlehash_coin_map[coin.puzzle_hash].append(coin.name())
else:
puzzlehash_coin_map[coin.puzzle_hash] = [coin]
puzzlehash_coin_map[coin.puzzle_hash] = [coin.name()]
# Addition Merkle set contains puzzlehash and hash of all coins with that puzzlehash
for puzzle, coins in puzzlehash_coin_map.items():
addition_merkle_set.add_already_hashed(puzzle)
addition_merkle_set.add_already_hashed(hash_coin_list(coins))
for puzzle, coin_ids in puzzlehash_coin_map.items():
additions_merkle_items.append(puzzle)
additions_merkle_items.append(hash_coin_ids(coin_ids))
additions_root = addition_merkle_set.get_root()
removals_root = removal_merkle_set.get_root()
additions_root = bytes32(compute_merkle_set_root(additions_merkle_items))
removals_root = bytes32(compute_merkle_set_root(tx_removals))
generator_hash = bytes32([0] * 32)
if block_generator is not None:
@ -302,7 +298,7 @@ def create_unfinished_block(
additions: Optional[List[Coin]] = None,
removals: Optional[List[Coin]] = None,
prev_block: Optional[BlockRecord] = None,
finished_sub_slots_input: List[EndOfSubSlotBundle] = None,
finished_sub_slots_input: Optional[List[EndOfSubSlotBundle]] = None,
) -> UnfinishedBlock:
"""
Creates a new unfinished block using all the information available at the signage point. This will have to be
@ -517,8 +513,9 @@ def unfinished_block_to_full_block(
new_generator,
new_generator_ref_list,
)
return recursive_replace(
ret = recursive_replace(
ret,
"foliage.reward_block_hash",
ret.reward_chain_block.get_hash(),
)
return ret
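
Both roots are now computed by chia_rs.compute_merkle_set_root over plain byte lists instead of the Python MerkleSet. A hedged sketch, assuming chia_rs and a chia checkout are installed; the puzzle hash and coin ids are fabricated:

from typing import List
from chia_rs import compute_merkle_set_root
from chia.types.blockchain_format.coin import hash_coin_ids
from chia.types.blockchain_format.sized_bytes import bytes32

puzzle_hash = bytes32(b"\x01" * 32)
coin_ids: List[bytes32] = [bytes32(b"\x02" * 32), bytes32(b"\x03" * 32)]
# The additions set interleaves each puzzle hash with the hash of its coin ids
items: List[bytes32] = [puzzle_hash, hash_coin_ids(coin_ids)]
additions_root = bytes32(compute_merkle_set_root(items))
print(additions_root.hex())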

View File

@ -44,7 +44,7 @@ def validate_unfinished_header_block(
expected_sub_slot_iters: uint64,
skip_overflow_last_ss_validation: bool = False,
skip_vdf_is_valid: bool = False,
check_sub_epoch_summary=True,
check_sub_epoch_summary: bool = True,
) -> Tuple[Optional[uint64], Optional[ValidationError]]:
"""
Validates an unfinished header block. This is a block without the infusion VDFs (unfinished)
@ -831,7 +831,7 @@ def validate_finished_header_block(
check_filter: bool,
expected_difficulty: uint64,
expected_sub_slot_iters: uint64,
check_sub_epoch_summary=True,
check_sub_epoch_summary: bool = True,
) -> Tuple[Optional[uint64], Optional[ValidationError]]:
"""
Fully validates the header of a block. A header block is the same as a full block, but

View File

@ -85,5 +85,5 @@ class BlockRecord(Streamable):
self.required_iters,
)
def sp_total_iters(self, constants: ConsensusConstants):
return self.sp_sub_slot_total_iters(constants) + self.sp_iters(constants)
def sp_total_iters(self, constants: ConsensusConstants) -> uint128:
return uint128(self.sp_sub_slot_total_iters(constants) + self.sp_iters(constants))

View File

@ -1,44 +1,40 @@
from typing import Dict, List, Optional
from chia.types.blockchain_format.coin import Coin, hash_coin_list
from chia_rs import compute_merkle_set_root
from chia.types.blockchain_format.coin import Coin, hash_coin_ids
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.errors import Err
from chia.util.merkle_set import MerkleSet
def validate_block_merkle_roots(
block_additions_root: bytes32,
block_removals_root: bytes32,
tx_additions: List[Coin] = None,
tx_removals: List[bytes32] = None,
tx_additions: Optional[List[Coin]] = None,
tx_removals: Optional[List[bytes32]] = None,
) -> Optional[Err]:
if tx_removals is None:
tx_removals = []
if tx_additions is None:
tx_additions = []
removal_merkle_set = MerkleSet()
addition_merkle_set = MerkleSet()
# Create removal Merkle set
for coin_name in tx_removals:
removal_merkle_set.add_already_hashed(coin_name)
# Create addition Merkle set
puzzlehash_coins_map: Dict[bytes32, List[Coin]] = {}
puzzlehash_coins_map: Dict[bytes32, List[bytes32]] = {}
for coin in tx_additions:
if coin.puzzle_hash in puzzlehash_coins_map:
puzzlehash_coins_map[coin.puzzle_hash].append(coin)
puzzlehash_coins_map[coin.puzzle_hash].append(coin.name())
else:
puzzlehash_coins_map[coin.puzzle_hash] = [coin]
puzzlehash_coins_map[coin.puzzle_hash] = [coin.name()]
# Addition Merkle set contains puzzlehash and hash of all coins with that puzzlehash
for puzzle, coins in puzzlehash_coins_map.items():
addition_merkle_set.add_already_hashed(puzzle)
addition_merkle_set.add_already_hashed(hash_coin_list(coins))
additions_merkle_items: List[bytes32] = []
for puzzle, coin_ids in puzzlehash_coins_map.items():
additions_merkle_items.append(puzzle)
additions_merkle_items.append(hash_coin_ids(coin_ids))
additions_root = addition_merkle_set.get_root()
removals_root = removal_merkle_set.get_root()
additions_root = bytes32(compute_merkle_set_root(additions_merkle_items))
removals_root = bytes32(compute_merkle_set_root(tx_removals))
if block_additions_root != additions_root:
return Err.BAD_ADDITION_ROOT

View File

@ -8,7 +8,7 @@ from concurrent.futures.process import ProcessPoolExecutor
from enum import Enum
from multiprocessing.context import BaseContext
from pathlib import Path
from typing import Dict, List, Optional, Set, Tuple
from typing import Any, Dict, List, Optional, Set, Tuple
from chia.consensus.block_body_validation import validate_block_body
from chia.consensus.block_header_validation import validate_unfinished_header_block
@ -67,9 +67,18 @@ class ReceiveBlockResult(Enum):
DISCONNECTED_BLOCK = 5 # Block's parent (previous pointer) is not in this blockchain
@dataclasses.dataclass
class StateChangeSummary:
peak: BlockRecord
fork_height: uint32
rolled_back_records: List[CoinRecord]
new_npc_results: List[NPCResult]
new_rewards: List[Coin]
class Blockchain(BlockchainInterface):
constants: ConsensusConstants
constants_json: Dict
constants_json: Dict[str, Any]
# peak of the blockchain
_peak_height: Optional[uint32]
@ -108,7 +117,7 @@ class Blockchain(BlockchainInterface):
multiprocessing_context: Optional[BaseContext] = None,
*,
single_threaded: bool = False,
):
) -> "Blockchain":
"""
Initializes a blockchain with the BlockRecords from disk, assuming they have all been
validated. Uses the genesis block given in override_constants, or as a fallback,
@ -142,11 +151,11 @@ class Blockchain(BlockchainInterface):
self.hint_store = hint_store
return self
def shut_down(self):
def shut_down(self) -> None:
self._shut_down = True
self.pool.shutdown(wait=True)
async def _load_chain_from_store(self, blockchain_dir):
async def _load_chain_from_store(self, blockchain_dir: Path) -> None:
"""
Initializes the state of the Blockchain class from the database.
"""
@ -165,7 +174,7 @@ class Blockchain(BlockchainInterface):
assert peak is not None
self._peak_height = self.block_record(peak).height
assert self.__height_map.contains_height(self._peak_height)
assert not self.__height_map.contains_height(self._peak_height + 1)
assert not self.__height_map.contains_height(uint32(self._peak_height + 1))
def get_peak(self) -> Optional[BlockRecord]:
"""
@ -193,12 +202,7 @@ class Blockchain(BlockchainInterface):
block: FullBlock,
pre_validation_result: PreValidationResult,
fork_point_with_peak: Optional[uint32] = None,
) -> Tuple[
ReceiveBlockResult,
Optional[Err],
Optional[uint32],
Tuple[List[CoinRecord], Dict[bytes, Dict[bytes32, CoinRecord]]],
]:
) -> Tuple[ReceiveBlockResult, Optional[Err], Optional[StateChangeSummary]]:
"""
This method must be called under the blockchain lock
Adds a new block into the blockchain, if it's valid and connected to the current
@ -215,24 +219,26 @@ class Blockchain(BlockchainInterface):
The result of adding the block to the blockchain (NEW_PEAK, ADDED_AS_ORPHAN, INVALID_BLOCK,
DISCONNECTED_BLOCK, ALREADY_HAVE_BLOCK)
An optional error if the result is not NEW_PEAK or ADDED_AS_ORPHAN
A fork point if the result is NEW_PEAK
A list of changes to the coin store, and changes to hints, if the result is NEW_PEAK
A StateChangeSummary iff NEW_PEAK, with:
- A fork point if the result is NEW_PEAK
- A list of coin changes as a result of rollback
- A list of NPCResult for any new transaction block added to the chain
"""
genesis: bool = block.height == 0
if self.contains_block(block.header_hash):
return ReceiveBlockResult.ALREADY_HAVE_BLOCK, None, None, ([], {})
return ReceiveBlockResult.ALREADY_HAVE_BLOCK, None, None
if not self.contains_block(block.prev_header_hash) and not genesis:
return (ReceiveBlockResult.DISCONNECTED_BLOCK, Err.INVALID_PREV_BLOCK_HASH, None, ([], {}))
return ReceiveBlockResult.DISCONNECTED_BLOCK, Err.INVALID_PREV_BLOCK_HASH, None
if not genesis and (self.block_record(block.prev_header_hash).height + 1) != block.height:
return ReceiveBlockResult.INVALID_BLOCK, Err.INVALID_HEIGHT, None, ([], {})
return ReceiveBlockResult.INVALID_BLOCK, Err.INVALID_HEIGHT, None
npc_result: Optional[NPCResult] = pre_validation_result.npc_result
required_iters = pre_validation_result.required_iters
if pre_validation_result.error is not None:
return ReceiveBlockResult.INVALID_BLOCK, Err(pre_validation_result.error), None, ([], {})
return ReceiveBlockResult.INVALID_BLOCK, Err(pre_validation_result.error), None
assert required_iters is not None
error_code, _ = await validate_block_body(
@ -250,7 +256,7 @@ class Blockchain(BlockchainInterface):
validate_signature=not pre_validation_result.validated_signature,
)
if error_code is not None:
return ReceiveBlockResult.INVALID_BLOCK, error_code, None, ([], {})
return ReceiveBlockResult.INVALID_BLOCK, error_code, None
block_record = block_to_block_record(
self.constants,
@ -265,23 +271,23 @@ class Blockchain(BlockchainInterface):
header_hash: bytes32 = block.header_hash
# Perform the DB operations to update the state, and rollback if something goes wrong
await self.block_store.add_full_block(header_hash, block, block_record)
fork_height, peak_height, records, (coin_record_change, hint_changes) = await self._reconsider_peak(
records, state_change_summary = await self._reconsider_peak(
block_record, genesis, fork_point_with_peak, npc_result
)
# Then update the memory cache. It is important that this task is not cancelled and does not throw
# Then update the memory cache. It is important that this is not cancelled and does not throw
# This is done after all async/DB operations, so there is a decreased chance of failure.
self.add_block_record(block_record)
if fork_height is not None:
self.__height_map.rollback(fork_height)
if state_change_summary is not None:
self.__height_map.rollback(state_change_summary.fork_height)
for fetched_block_record in records:
self.__height_map.update_height(
fetched_block_record.height,
fetched_block_record.header_hash,
fetched_block_record.sub_epoch_summary_included,
)
if peak_height is not None:
self._peak_height = peak_height
await self.__height_map.maybe_flush()
if state_change_summary is not None:
self._peak_height = block_record.height
except BaseException as e:
self.block_store.rollback_cache_block(header_hash)
log.error(
@ -290,23 +296,14 @@ class Blockchain(BlockchainInterface):
)
raise
if fork_height is not None:
# new coin records added
assert coin_record_change is not None
return ReceiveBlockResult.NEW_PEAK, None, fork_height, (coin_record_change, hint_changes)
else:
return ReceiveBlockResult.ADDED_AS_ORPHAN, None, None, ([], {})
# This is done outside the try-except in case it fails, since we do not want to revert anything if it does
await self.__height_map.maybe_flush()
def get_hint_list(self, npc_result: NPCResult) -> List[Tuple[bytes32, bytes]]:
if npc_result.conds is None:
return []
h_list = []
for spend in npc_result.conds.spends:
for puzzle_hash, amount, hint in spend.create_coin:
if hint != b"":
coin_id = Coin(spend.coin_id, puzzle_hash, amount).name()
h_list.append((coin_id, hint))
return h_list
if state_change_summary is not None:
# new coin records added
return ReceiveBlockResult.NEW_PEAK, None, state_change_summary
else:
return ReceiveBlockResult.ADDED_AS_ORPHAN, None, None
async def _reconsider_peak(
self,
@ -314,21 +311,16 @@ class Blockchain(BlockchainInterface):
genesis: bool,
fork_point_with_peak: Optional[uint32],
npc_result: Optional[NPCResult],
) -> Tuple[
Optional[uint32],
Optional[uint32],
List[BlockRecord],
Tuple[List[CoinRecord], Dict[bytes, Dict[bytes32, CoinRecord]]],
]:
) -> Tuple[List[BlockRecord], Optional[StateChangeSummary]]:
"""
When a new block is added, this is called, to check if the new block is the new peak of the chain.
This also handles reorgs by reverting blocks which are not in the heaviest chain.
It returns the height of the fork between the previous chain and the new chain, or returns
None if there was no update to the heaviest chain.
It returns the summary of the applied changes, including the height of the fork between the previous chain
and the new chain, or returns None if there was no update to the heaviest chain.
"""
peak = self.get_peak()
latest_coin_state: Dict[bytes32, CoinRecord] = {}
hint_coin_state: Dict[bytes, Dict[bytes32, CoinRecord]] = {}
rolled_back_state: Dict[bytes32, CoinRecord] = {}
if genesis:
if peak is None:
@ -341,26 +333,26 @@ class Blockchain(BlockchainInterface):
tx_removals, tx_additions = [], []
if block.is_transaction_block():
assert block.foliage_transaction_block is not None
added = await self.coin_store.new_block(
await self.coin_store.new_block(
block.height,
block.foliage_transaction_block.timestamp,
block.get_included_reward_coins(),
tx_additions,
tx_removals,
)
else:
added, _ = [], []
await self.block_store.set_in_chain([(block_record.header_hash,)])
await self.block_store.set_peak(block_record.header_hash)
return uint32(0), uint32(0), [block_record], (added, {})
return None, None, [], ([], {})
return [block_record], StateChangeSummary(
block_record, uint32(0), [], [], list(block.get_included_reward_coins())
)
return [], None
assert peak is not None
if block_record.weight <= peak.weight:
# This is not a heavier block than the heaviest we have seen, so we don't change the coin set
return None, None, [], ([], {})
return [], None
# Find the fork. if the block is just being appended, it will return the peak
# Finds the fork. If the block is just being appended, it will return the peak
# If no blocks in common, returns -1, and reverts all blocks
if block_record.prev_hash == peak.header_hash:
fork_height: int = peak.height
@ -370,14 +362,14 @@ class Blockchain(BlockchainInterface):
fork_height = find_fork_point_in_chain(self, block_record, peak)
if block_record.prev_hash != peak.header_hash:
roll_changes: List[CoinRecord] = await self.coin_store.rollback_to_block(fork_height)
for coin_record in roll_changes:
latest_coin_state[coin_record.name] = coin_record
for coin_record in await self.coin_store.rollback_to_block(fork_height):
rolled_back_state[coin_record.name] = coin_record
# Collect all blocks from fork point to new peak
# Collects all blocks from fork point to new peak
blocks_to_add: List[Tuple[FullBlock, BlockRecord]] = []
curr = block_record.header_hash
# Backtracks up to the fork point, pulling all the required blocks from DB (that will soon be in the chain)
while fork_height < 0 or curr != self.height_to_hash(uint32(fork_height)):
fetched_full_block: Optional[FullBlock] = await self.block_store.get_full_block(curr)
fetched_block_record: Optional[BlockRecord] = await self.block_store.get_block_record(curr)
@ -389,12 +381,16 @@ class Blockchain(BlockchainInterface):
break
curr = fetched_block_record.prev_hash
records_to_add = []
records_to_add: List[BlockRecord] = []
npc_results: List[NPCResult] = []
reward_coins: List[Coin] = []
for fetched_full_block, fetched_block_record in reversed(blocks_to_add):
records_to_add.append(fetched_block_record)
if not fetched_full_block.is_transaction_block():
# Coins are only created in TX blocks so there are no state updates for this block
continue
# We need to recompute the additions and removals, since they are not stored in the DB (only the generator is).
if fetched_block_record.header_hash == block_record.header_hash:
tx_removals, tx_additions, npc_res = await self.get_tx_removals_and_additions(
fetched_full_block, npc_result
@ -402,37 +398,21 @@ class Blockchain(BlockchainInterface):
else:
tx_removals, tx_additions, npc_res = await self.get_tx_removals_and_additions(fetched_full_block, None)
# Collect the NPC results for later post-processing
if npc_res is not None:
npc_results.append(npc_res)
# Apply the coin store changes for each block that is now in the blockchain
assert fetched_full_block.foliage_transaction_block is not None
added_rec = await self.coin_store.new_block(
await self.coin_store.new_block(
fetched_full_block.height,
fetched_full_block.foliage_transaction_block.timestamp,
fetched_full_block.get_included_reward_coins(),
tx_additions,
tx_removals,
)
removed_rec: List[Optional[CoinRecord]] = [
await self.coin_store.get_coin_record(name) for name in tx_removals
]
# Set additions first, then removals in order to handle ephemeral coin state
# Add in height order is also required
record: Optional[CoinRecord]
for record in added_rec:
assert record
latest_coin_state[record.name] = record
for record in removed_rec:
assert record
latest_coin_state[record.name] = record
if npc_res is not None:
hint_list: List[Tuple[bytes32, bytes]] = self.get_hint_list(npc_res)
await self.hint_store.add_hints(hint_list)
# There can be multiple coins for the same hint
for coin_id, hint in hint_list:
key = hint
if key not in hint_coin_state:
hint_coin_state[key] = {}
hint_coin_state[key][coin_id] = latest_coin_state[coin_id]
# Collect the new reward coins for later post-processing
reward_coins.extend(fetched_full_block.get_included_reward_coins())
# we made it to the end successfully
# Rollback sub_epoch_summaries
@ -441,11 +421,9 @@ class Blockchain(BlockchainInterface):
# Changes the peak to be the new peak
await self.block_store.set_peak(block_record.header_hash)
return (
uint32(max(fork_height, 0)),
block_record.height,
records_to_add,
(list(latest_coin_state.values()), hint_coin_state),
return records_to_add, StateChangeSummary(
block_record, uint32(max(fork_height, 0)), list(rolled_back_state.values()), npc_results, reward_coins
)
async def get_tx_removals_and_additions(
@ -561,7 +539,7 @@ class Blockchain(BlockchainInterface):
return list(reversed(recent_rc))
async def validate_unfinished_block(
self, block: UnfinishedBlock, npc_result: Optional[NPCResult], skip_overflow_ss_validation=True
self, block: UnfinishedBlock, npc_result: Optional[NPCResult], skip_overflow_ss_validation: bool = True
) -> PreValidationResult:
if (
not self.contains_block(block.prev_header_hash)
@ -654,9 +632,9 @@ class Blockchain(BlockchainInterface):
npc_result_bytes = await task
if npc_result_bytes is None:
raise ConsensusError(Err.UNKNOWN)
ret = NPCResult.from_bytes(npc_result_bytes)
ret: NPCResult = NPCResult.from_bytes(npc_result_bytes)
if ret.error is not None:
raise ConsensusError(ret.error)
raise ConsensusError(Err(ret.error))
return ret
def contains_block(self, header_hash: bytes32) -> bool:
@ -693,7 +671,7 @@ class Blockchain(BlockchainInterface):
def get_peak_height(self) -> Optional[uint32]:
return self._peak_height
async def warmup(self, fork_point: uint32):
async def warmup(self, fork_point: uint32) -> None:
"""
Loads blocks into the cache. The blocks loaded include all blocks from
fork point - BLOCKS_CACHE_SIZE up to and including the fork_point.
@ -710,7 +688,7 @@ class Blockchain(BlockchainInterface):
for block_record in block_records.values():
self.add_block_record(block_record)
def clean_block_record(self, height: int):
def clean_block_record(self, height: int) -> None:
"""
Clears all block records in the cache which have block_record < height.
Args:
@ -729,7 +707,7 @@ class Blockchain(BlockchainInterface):
height = height - 1
blocks_to_remove = self.__heights_in_cache.get(uint32(height), None)
def clean_block_records(self):
def clean_block_records(self) -> None:
"""
Cleans the cache so that we only maintain relevant blocks. This removes
block records that have height < peak - BLOCKS_CACHE_SIZE.
@ -793,7 +771,7 @@ class Blockchain(BlockchainInterface):
return None
return header_dict[header_hash]
async def get_block_records_at(self, heights: List[uint32], batch_size=900) -> List[BlockRecord]:
async def get_block_records_at(self, heights: List[uint32], batch_size: int = 900) -> List[BlockRecord]:
"""
Gets block records by height (only blocks that are part of the chain).
"""
@ -820,12 +798,12 @@ class Blockchain(BlockchainInterface):
return self.__block_records[header_hash]
return await self.block_store.get_block_record(header_hash)
def remove_block_record(self, header_hash: bytes32):
def remove_block_record(self, header_hash: bytes32) -> None:
sbr = self.block_record(header_hash)
del self.__block_records[header_hash]
self.__heights_in_cache[sbr.height].remove(header_hash)
def add_block_record(self, block_record: BlockRecord):
def add_block_record(self, block_record: BlockRecord) -> None:
"""
Adds a block record to the cache.
"""
@ -837,8 +815,8 @@ class Blockchain(BlockchainInterface):
async def persist_sub_epoch_challenge_segments(
self, ses_block_hash: bytes32, segments: List[SubEpochChallengeSegment]
):
return await self.block_store.persist_sub_epoch_challenge_segments(ses_block_hash, segments)
) -> None:
await self.block_store.persist_sub_epoch_challenge_segments(ses_block_hash, segments)
async def get_sub_epoch_challenge_segments(
self,
@ -863,7 +841,7 @@ class Blockchain(BlockchainInterface):
return False
async def get_block_generator(
self, block: BlockInfo, additional_blocks: Dict[bytes32, FullBlock] = None
self, block: BlockInfo, additional_blocks: Optional[Dict[bytes32, FullBlock]] = None
) -> Optional[BlockGenerator]:
if additional_blocks is None:
additional_blocks = {}
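
The method shown above (receive_block() in this tree) now returns (ReceiveBlockResult, Optional[Err], Optional[StateChangeSummary]) instead of the old four-element tuple. A hedged consumer sketch; the block and pre-validation objects are assumed inputs, and only NEW_PEAK carries a summary:

from chia.consensus.blockchain import Blockchain, ReceiveBlockResult
from chia.consensus.multiprocess_validation import PreValidationResult
from chia.types.full_block import FullBlock

async def add_block(blockchain: Blockchain, block: FullBlock, pvr: PreValidationResult) -> None:
    result, error, summary = await blockchain.receive_block(block, pvr)
    if result == ReceiveBlockResult.NEW_PEAK:
        assert summary is not None  # NEW_PEAK always carries a StateChangeSummary
        print(
            f"peak height {summary.peak.height}, fork {summary.fork_height}, "
            f"{len(summary.rolled_back_records)} coins rolled back, "
            f"{len(summary.new_rewards)} reward coins"
        )
    elif error is not None:
        print(f"block not added: {error}")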

View File

@ -34,16 +34,16 @@ class BlockchainInterface:
def contains_block(self, header_hash: bytes32) -> bool:
pass
def remove_block_record(self, header_hash: bytes32):
def remove_block_record(self, header_hash: bytes32) -> None:
pass
def add_block_record(self, block_record: BlockRecord):
def add_block_record(self, block_record: BlockRecord) -> None:
pass
def contains_height(self, height: uint32) -> bool:
pass
async def warmup(self, fork_point: uint32):
async def warmup(self, fork_point: uint32) -> None:
pass
async def get_block_record_from_db(self, header_hash: bytes32) -> Optional[BlockRecord]:
@ -72,7 +72,7 @@ class BlockchainInterface:
async def persist_sub_epoch_challenge_segments(
self, sub_epoch_summary_height: bytes32, segments: List[SubEpochChallengeSegment]
):
) -> None:
pass
async def get_sub_epoch_challenge_segments(

View File

@ -18,11 +18,11 @@ def farmer_parent_id(block_height: uint32, genesis_challenge: bytes32) -> bytes3
return bytes32(genesis_challenge[16:] + block_height.to_bytes(16, "big"))
def create_pool_coin(block_height: uint32, puzzle_hash: bytes32, reward: uint64, genesis_challenge: bytes32):
def create_pool_coin(block_height: uint32, puzzle_hash: bytes32, reward: uint64, genesis_challenge: bytes32) -> Coin:
parent_id = pool_parent_id(block_height, genesis_challenge)
return Coin(parent_id, puzzle_hash, reward)
def create_farmer_coin(block_height: uint32, puzzle_hash: bytes32, reward: uint64, genesis_challenge: bytes32):
def create_farmer_coin(block_height: uint32, puzzle_hash: bytes32, reward: uint64, genesis_challenge: bytes32) -> Coin:
parent_id = farmer_parent_id(block_height, genesis_challenge)
return Coin(parent_id, puzzle_hash, reward)

View File

@ -1,5 +1,6 @@
import dataclasses
import logging
from typing import Any
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.byte_types import hexstr_to_bytes
@ -61,10 +62,10 @@ class ConsensusConstants:
POOL_SUB_SLOT_ITERS: uint64
SOFT_FORK_HEIGHT: uint32
def replace(self, **changes) -> "ConsensusConstants":
def replace(self, **changes: object) -> "ConsensusConstants":
return dataclasses.replace(self, **changes)
def replace_str_to_bytes(self, **changes) -> "ConsensusConstants":
def replace_str_to_bytes(self, **changes: Any) -> "ConsensusConstants":
"""
Overrides str (hex) values with bytes.
"""

View File

@ -38,8 +38,8 @@ testnet_kwargs = {
"3d8765d3a597ec1d99663f6c9816d915b9f68613ac94009884c4addaefcce6af"
),
"MAX_VDF_WITNESS_SIZE": 64,
# Size of mempool = 50x the size of block # temporary change until #9125 gets in
"MEMPOOL_BLOCK_BUFFER": 10,
# Size of mempool = 50x the size of block
"MEMPOOL_BLOCK_BUFFER": 50,
# Max coin amount, fits into 64 bits
"MAX_COIN_AMOUNT": uint64((1 << 64) - 1),
# Max block cost in clvm cost units

View File

@ -199,7 +199,7 @@ def _get_next_sub_slot_iters(
block_at_height_included_ses: bool,
new_slot: bool,
signage_point_total_iters: uint128,
skip_epoch_check=False,
skip_epoch_check: bool = False,
) -> uint64:
"""
Returns the slot iterations required for the next block after the one at height, where new_slot is true
@ -278,7 +278,7 @@ def _get_next_difficulty(
block_at_height_included_ses: bool,
new_slot: bool,
signage_point_total_iters: uint128,
skip_epoch_check=False,
skip_epoch_check: bool = False,
) -> uint64:
"""
Returns the difficulty of the next block that extends onto block.

View File

@ -58,7 +58,7 @@ def get_block_challenge(
genesis_block: bool,
overflow: bool,
skip_overflow_last_ss_validation: bool,
):
) -> bytes32:
if len(header_block.finished_sub_slots) > 0:
if overflow:
# New sub-slot with overflow block

View File

@ -3,7 +3,7 @@ import logging
import traceback
from concurrent.futures import Executor
from dataclasses import dataclass
from typing import Awaitable, Callable, Dict, List, Optional, Sequence, Tuple
from typing import Any, Awaitable, Callable, Dict, List, Optional, Sequence, Tuple
from blspy import AugSchemeMPL, G1Element
@ -45,7 +45,7 @@ class PreValidationResult(Streamable):
def batch_pre_validate_blocks(
constants_dict: Dict,
constants_dict: Dict[str, Any],
blocks_pickled: Dict[bytes, bytes],
full_blocks_pickled: Optional[List[bytes]],
header_blocks_pickled: Optional[List[bytes]],
@ -163,13 +163,13 @@ def batch_pre_validate_blocks(
async def pre_validate_blocks_multiprocessing(
constants: ConsensusConstants,
constants_json: Dict,
constants_json: Dict[str, Any],
block_records: BlockchainInterface,
blocks: Sequence[FullBlock],
pool: Executor,
check_filter: bool,
npc_results: Dict[uint32, NPCResult],
get_block_generator: Callable[[BlockInfo, Optional[Dict[bytes32, FullBlock]]], Awaitable[Optional[BlockGenerator]]],
get_block_generator: Callable[[BlockInfo, Dict[bytes32, FullBlock]], Awaitable[Optional[BlockGenerator]]],
batch_size: int,
wp_summaries: Optional[List[SubEpochSummary]] = None,
*,

View File

@ -14,4 +14,4 @@ def _expected_plot_size(k: int) -> uint64:
is necessary to store the entries in the plot.
"""
return ((2 * k) + 1) * (2 ** (k - 1))
return uint64(((2 * k) + 1) * (2 ** (k - 1)))
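
A worked instance of the formula: for k = 32 it yields (2*32 + 1) * 2**31 = 65 * 2,147,483,648 = 139,586,437,120, i.e. exactly 130 GiB.

k = 32
size = ((2 * k) + 1) * (2 ** (k - 1))
print(size)              # 139586437120
print(size / (1 << 30))  # 130.0 (GiB)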

View File

@ -1,4 +1,4 @@
from typing import List, Optional
from typing import List, Optional, Tuple
from chia.consensus.block_record import BlockRecord
from chia.consensus.blockchain_interface import BlockchainInterface
@ -17,7 +17,7 @@ def get_signage_point_vdf_info(
blocks: BlockchainInterface,
sp_total_iters: uint128,
sp_iters: uint64,
):
) -> Tuple[bytes32, bytes32, ClassgroupElement, ClassgroupElement, uint64, uint64]:
"""
Returns the following information, for the VDF of the signage point at sp_total_iters.
cc and rc challenge hash

View File

@ -67,7 +67,7 @@ class DaemonProxy:
request_id = request["request_id"]
self._request_dict[request_id] = asyncio.Event()
string = dict_to_json_str(request)
if self.websocket is None:
if self.websocket is None or self.websocket.closed:
raise Exception("Websocket is not connected")
asyncio.create_task(self.websocket.send_str(string))

View File

@ -143,6 +143,7 @@ class WebSocketServer:
self.plots_queue: List[Dict] = []
self.connections: Dict[str, List[WebSocketResponse]] = dict() # service_name : [WebSocket]
self.remote_address_map: Dict[WebSocketResponse, str] = dict() # socket: service_name
self.ping_job: Optional[asyncio.Task] = None
self.net_config = load_config(root_path, "config.yaml")
self.self_hostname = self.net_config["self_hostname"]
self.daemon_port = self.net_config["daemon_port"]
@ -206,6 +207,7 @@ class WebSocketServer:
self.log.error(f"Error while canceling task.{e} {task}")
async def stop(self) -> Dict[str, Any]:
self.cancel_task_safe(self.ping_job)
jobs = []
for service_name in self.services.keys():
jobs.append(kill_service(self.root_path, self.services, service_name))
@ -221,7 +223,7 @@ class WebSocketServer:
while True:
msg = await ws.receive()
self.log.debug(f"Received message: {msg}")
self.log.debug("Received message: %s", msg)
if msg.type == WSMsgType.TEXT:
try:
decoded = json.loads(msg.data)
@ -271,6 +273,28 @@ class WebSocketServer:
after_removal.append(connection)
self.connections[service_name] = after_removal
async def ping_task(self) -> None:
restart = True
await asyncio.sleep(30)
for remote_address, service_name in self.remote_address_map.items():
if service_name in self.connections:
sockets = self.connections[service_name]
for socket in sockets:
try:
self.log.debug(f"About to ping: {service_name}")
await socket.ping()
except asyncio.CancelledError:
self.log.warning("Ping task received Cancel")
restart = False
break
except Exception:
self.log.exception("Ping error")
self.log.error("Ping failed, connection closed.")
self.remove_connection(socket)
await socket.close()
if restart is True:
self.ping_job = asyncio.create_task(self.ping_task())
async def handle_message(
self, websocket: WebSocketResponse, message: WsRpcMessage
) -> Tuple[Optional[str], List[Any]]:
@ -614,7 +638,7 @@ class WebSocketServer:
response = create_payload("keyring_status_changed", keyring_status, "daemon", destination)
for websocket in websockets:
for websocket in websockets.copy():
try:
await websocket.send_str(response)
except Exception as e:
@ -673,7 +697,7 @@ class WebSocketServer:
response = create_payload("state_changed", message, service, "wallet_ui")
for websocket in websockets:
for websocket in websockets.copy():
try:
await websocket.send_str(response)
except Exception as e:
@ -1148,6 +1172,8 @@ class WebSocketServer:
}
else:
self.remote_address_map[websocket] = service
if self.ping_job is None:
self.ping_job = asyncio.create_task(self.ping_task())
self.log.info(f"registered for service {service}")
log.info(f"{response}")
return response
@ -1402,13 +1428,13 @@ def run_daemon(root_path: Path, wait_for_unlock: bool = False) -> int:
return result
def main(argv) -> int:
def main() -> int:
from chia.util.default_root import DEFAULT_ROOT_PATH
from chia.util.keychain import Keychain
wait_for_unlock = "--wait-for-unlock" in argv and Keychain.is_keyring_locked()
wait_for_unlock = "--wait-for-unlock" in sys.argv[1:] and Keychain.is_keyring_locked()
return run_daemon(DEFAULT_ROOT_PATH, wait_for_unlock)
if __name__ == "__main__":
main(sys.argv[1:])
main()
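
ping_task() above is a one-shot coroutine that re-arms itself: sleep, ping every registered socket, then schedule the next round unless it was cancelled. A standalone sketch of that pattern, assuming socket objects with an async ping():

import asyncio

async def ping_task(sockets: list, interval: float = 30.0) -> None:
    restart = True
    await asyncio.sleep(interval)
    for socket in sockets:
        try:
            await socket.ping()
        except asyncio.CancelledError:
            restart = False  # shutdown in progress; do not re-arm
            break
        except Exception:
            # the real server logs the failure, removes, and closes the connection here
            pass
    if restart:
        asyncio.create_task(ping_task(sockets, interval))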

View File

@ -2,15 +2,14 @@ import asyncio
import json
import logging
import time
from pathlib import Path
from typing import Any, Callable, Dict, List, Optional, Tuple
import traceback
from pathlib import Path
from typing import Any, Callable, Dict, List, Optional, Set, Tuple
import aiohttp
from blspy import AugSchemeMPL, G1Element, G2Element, PrivateKey
import chia.server.ws_connection as ws # lgtm [py/import-and-import-from]
from chia.consensus.coinbase import create_puzzlehash_for_pk
from chia.consensus.constants import ConsensusConstants
from chia.daemon.keychain_proxy import (
KeychainProxy,
@ -18,20 +17,20 @@ from chia.daemon.keychain_proxy import (
connect_to_keychain_and_validate,
wrap_local_keychain,
)
from chia.plot_sync.receiver import Receiver
from chia.plot_sync.delta import Delta
from chia.pools.pool_config import PoolWalletConfig, load_pool_config, add_auth_key
from chia.plot_sync.receiver import Receiver
from chia.pools.pool_config import PoolWalletConfig, add_auth_key, load_pool_config
from chia.protocols import farmer_protocol, harvester_protocol
from chia.protocols.pool_protocol import (
AuthenticationPayload,
ErrorResponse,
get_current_authentication_token,
GetFarmerResponse,
PoolErrorCode,
PostFarmerPayload,
PostFarmerRequest,
PutFarmerPayload,
PutFarmerRequest,
AuthenticationPayload,
get_current_authentication_token,
)
from chia.protocols.protocol_message_types import ProtocolMessageTypes
from chia.server.outbound_message import NodeType, make_msg
@ -42,16 +41,16 @@ from chia.types.blockchain_format.proof_of_space import ProofOfSpace
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.bech32m import decode_puzzle_hash
from chia.util.byte_types import hexstr_to_bytes
from chia.util.config import load_config, lock_and_load_config, save_config, config_path_for_filename
from chia.util.config import config_path_for_filename, load_config, lock_and_load_config, save_config
from chia.util.hash import std_hash
from chia.util.ints import uint8, uint16, uint32, uint64
from chia.util.keychain import Keychain
from chia.wallet.derive_keys import (
master_sk_to_farmer_sk,
master_sk_to_pool_sk,
master_sk_to_wallet_sk,
find_authentication_sk,
find_owner_sk,
master_sk_to_farmer_sk,
master_sk_to_pool_sk,
match_address_to_sk,
)
from chia.wallet.puzzles.singleton_top_layer import SINGLETON_MOD
@ -60,6 +59,7 @@ singleton_mod_hash = SINGLETON_MOD.get_tree_hash()
log = logging.getLogger(__name__)
UPDATE_POOL_INFO_INTERVAL: int = 3600
UPDATE_POOL_INFO_FAILURE_RETRY_INTERVAL: int = 120
UPDATE_POOL_FARMER_INFO_INTERVAL: int = 300
"""
@ -471,6 +471,8 @@ class Farmer:
# Only update the first time from GET /pool_info, gets updated from GET /farmer later
if pool_state["current_difficulty"] is None:
pool_state["current_difficulty"] = pool_info["minimum_difficulty"]
else:
pool_state["next_pool_info_update"] = time.time() + UPDATE_POOL_INFO_FAILURE_RETRY_INTERVAL
if time.time() >= pool_state["next_farmer_update"]:
pool_state["next_farmer_update"] = time.time() + UPDATE_POOL_FARMER_INFO_INTERVAL
@ -503,7 +505,7 @@ class Farmer:
farmer_info, error_code = await update_pool_farmer_info()
if error_code == PoolErrorCode.FARMER_NOT_KNOWN:
# Make the farmer known on the pool with a POST /farmer
owner_sk_and_index: Optional[PrivateKey, uint32] = find_owner_sk(
owner_sk_and_index: Optional[Tuple[PrivateKey, uint32]] = find_owner_sk(
self.all_root_sks, pool_config.owner_public_key
)
assert owner_sk_and_index is not None
@ -527,7 +529,7 @@ class Farmer:
and pool_config.payout_instructions.lower() != farmer_info.payout_instructions.lower()
)
if payout_instructions_update_required or error_code == PoolErrorCode.INVALID_SIGNATURE:
owner_sk_and_index: Optional[PrivateKey, uint32] = find_owner_sk(
owner_sk_and_index: Optional[Tuple[PrivateKey, uint32]] = find_owner_sk(
self.all_root_sks, pool_config.owner_public_key
)
assert owner_sk_and_index is not None
@ -550,25 +552,30 @@ class Farmer:
def get_private_keys(self):
return self._private_keys
async def get_reward_targets(self, search_for_private_key: bool) -> Dict:
async def get_reward_targets(self, search_for_private_key: bool, max_ph_to_search: int = 500) -> Dict:
if search_for_private_key:
all_sks = await self.get_all_private_keys()
stop_searching_for_farmer, stop_searching_for_pool = False, False
for i in range(500):
if stop_searching_for_farmer and stop_searching_for_pool and i > 0:
break
for sk, _ in all_sks:
ph = create_puzzlehash_for_pk(master_sk_to_wallet_sk(sk, uint32(i)).get_g1())
have_farmer_sk, have_pool_sk = False, False
search_addresses: List[bytes32] = [self.farmer_target, self.pool_target]
for sk, _ in all_sks:
found_addresses: Set[bytes32] = match_address_to_sk(sk, search_addresses, max_ph_to_search)
if not have_farmer_sk and self.farmer_target in found_addresses:
search_addresses.remove(self.farmer_target)
have_farmer_sk = True
if not have_pool_sk and self.pool_target in found_addresses:
search_addresses.remove(self.pool_target)
have_pool_sk = True
if have_farmer_sk and have_pool_sk:
break
if ph == self.farmer_target:
stop_searching_for_farmer = True
if ph == self.pool_target:
stop_searching_for_pool = True
return {
"farmer_target": self.farmer_target_encoded,
"pool_target": self.pool_target_encoded,
"have_farmer_sk": stop_searching_for_farmer,
"have_pool_sk": stop_searching_for_pool,
"have_farmer_sk": have_farmer_sk,
"have_pool_sk": have_pool_sk,
}
return {
"farmer_target": self.farmer_target_encoded,

View File

@ -1,6 +1,6 @@
import json
import time
from typing import Callable, Optional, List, Any, Dict, Tuple
from typing import Any, Dict, List, Optional, Tuple
import aiohttp
from blspy import AugSchemeMPL, G2Element, PrivateKey
@ -12,17 +12,17 @@ from chia.consensus.pot_iterations import calculate_iterations_quality, calculat
from chia.farmer.farmer import Farmer
from chia.protocols import farmer_protocol, harvester_protocol
from chia.protocols.harvester_protocol import (
PoolDifficulty,
PlotSyncStart,
PlotSyncPlotList,
PlotSyncPathList,
PlotSyncDone,
PlotSyncPathList,
PlotSyncPlotList,
PlotSyncStart,
PoolDifficulty,
)
from chia.protocols.pool_protocol import (
get_current_authentication_token,
PoolErrorCode,
PostPartialRequest,
PostPartialPayload,
PostPartialRequest,
get_current_authentication_token,
)
from chia.protocols.protocol_message_types import ProtocolMessageTypes
from chia.server.outbound_message import NodeType, make_msg
@ -50,9 +50,6 @@ class FarmerAPI:
def __init__(self, farmer) -> None:
self.farmer = farmer
def _set_state_changed_callback(self, callback: Callable):
self.farmer.state_changed_callback = callback
@api_request
@peer_required
async def new_proof_of_space(
@ -272,6 +269,17 @@ class FarmerAPI:
self.farmer.log.error(f"Error connecting to pool: {e}")
return
self.farmer.state_changed(
"submitted_partial",
{
"launcher_id": post_partial_request.payload.launcher_id.hex(),
"pool_url": pool_url,
"current_difficulty": pool_state_dict["current_difficulty"],
"points_acknowledged_since_start": pool_state_dict["points_acknowledged_since_start"],
"points_acknowledged_24h": pool_state_dict["points_acknowledged_24h"],
},
)
return
@api_request

View File

@ -136,7 +136,7 @@ class BlockHeightMap:
if ses is not None:
self.__sub_epoch_summaries[height] = bytes(ses)
async def maybe_flush(self):
async def maybe_flush(self) -> None:
if self.__dirty < 1000:
return

View File

@ -179,6 +179,49 @@ class CoinStore:
return record
return None
async def get_coin_records(self, names: List[bytes32]) -> List[CoinRecord]:
if len(names) == 0:
return []
coins: List[CoinRecord] = []
new_names: List[bytes32] = []
for n in names:
cached = self.coin_record_cache.get(n)
if cached is not None:
coins.append(cached)
else:
new_names.append(n)
names = new_names
if len(names) == 0:
return coins
async with self.db_wrapper.read_db() as conn:
cursors: List[Cursor] = []
for names_chunk in chunks(names, MAX_SQLITE_PARAMETERS):
names_db: Tuple[Any, ...]
if self.db_wrapper.db_version == 2:
names_db = tuple(names_chunk)
else:
names_db = tuple([n.hex() for n in names_chunk])
cursors.append(
await conn.execute(
f"SELECT confirmed_index, spent_index, coinbase, puzzle_hash, "
f"coin_parent, amount, timestamp FROM coin_record "
f'WHERE coin_name in ({",".join(["?"] * len(names_db))}) ',
names_db,
)
)
for cursor in cursors:
for row in await cursor.fetchall():
coin = self.row_to_coin(row)
record = CoinRecord(coin, row[0], row[1], row[2], row[6])
coins.append(record)
self.coin_record_cache.put(record.coin.name(), record)
return coins
async def get_coins_added_at_height(self, height: uint32) -> List[CoinRecord]:
async with self.db_wrapper.read_db() as conn:
async with conn.execute(
@ -428,6 +471,7 @@ class CoinStore:
self.coin_record_cache.remove(coin_name)
coin_changes: Dict[bytes32, CoinRecord] = {}
# Add coins that are confirmed in the reverted blocks to the list of updated coins.
async with self.db_wrapper.write_db() as conn:
async with conn.execute(
"SELECT confirmed_index, spent_index, coinbase, puzzle_hash, "
@ -439,12 +483,13 @@ class CoinStore:
record = CoinRecord(coin, uint32(0), row[1], row[2], uint64(0))
coin_changes[record.name] = record
# Delete from storage
# Delete reverted blocks from storage
await conn.execute("DELETE FROM coin_record WHERE confirmed_index>?", (block_index,))
# Add coins that are spent in the reverted blocks to the list of changed coins.
async with conn.execute(
"SELECT confirmed_index, spent_index, coinbase, puzzle_hash, "
"coin_parent, amount, timestamp FROM coin_record WHERE confirmed_index>?",
"coin_parent, amount, timestamp FROM coin_record WHERE spent_index>?",
(block_index,),
) as cursor:
for row in await cursor.fetchall():
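
get_coin_records() splits the id list before building the IN (...) query because SQLite caps the number of bound parameters per statement (commonly 999; MAX_SQLITE_PARAMETERS presumably encodes a safe value, and chunks() below is a local stand-in for the helper the diff imports):

from typing import Iterator, List, TypeVar

T = TypeVar("T")

def chunks(items: List[T], size: int) -> Iterator[List[T]]:
    for i in range(0, len(items), size):
        yield items[i : i + size]

names = [f"coin{i}".encode() for i in range(2500)]
for names_chunk in chunks(names, 999):
    placeholders = ",".join(["?"] * len(names_chunk))
    query = f"SELECT confirmed_index, spent_index FROM coin_record WHERE coin_name IN ({placeholders})"
    # cursor = await conn.execute(query, tuple(names_chunk)) would run once per chunk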

View File

@ -17,7 +17,7 @@ from blspy import AugSchemeMPL
import chia.server.ws_connection as ws # lgtm [py/import-and-import-from]
from chia.consensus.block_creation import unfinished_block_to_full_block
from chia.consensus.block_record import BlockRecord
from chia.consensus.blockchain import Blockchain, ReceiveBlockResult
from chia.consensus.blockchain import Blockchain, ReceiveBlockResult, StateChangeSummary
from chia.consensus.blockchain_interface import BlockchainInterface
from chia.consensus.constants import ConsensusConstants
from chia.consensus.cost_calculator import NPCResult
@ -26,6 +26,7 @@ from chia.consensus.make_sub_epoch_summary import next_sub_epoch_summary
from chia.consensus.multiprocess_validation import PreValidationResult
from chia.consensus.pot_iterations import calculate_sp_iters
from chia.full_node.block_store import BlockStore
from chia.full_node.hint_management import get_hints_and_subscription_coin_ids
from chia.full_node.lock_queue import LockQueue, LockClient
from chia.full_node.bundle_tools import detect_potential_template_generator
from chia.full_node.coin_store import CoinStore
@ -78,6 +79,15 @@ from chia.util.db_synchronous import db_synchronous_on
from chia.util.db_version import lookup_db_version, set_db_version_async
# This is the result of calling peak_post_processing, which is then fed into peak_post_processing_2
@dataclasses.dataclass
class PeakPostProcessingResult:
mempool_peak_result: List[Tuple[SpendBundle, NPCResult, bytes32]] # The result of calling MempoolManager.new_peak
fns_peak_result: FullNodeStorePeakResult # The result of calling FullNodeStore.new_peak
hints: List[Tuple[bytes32, bytes]] # The hints added to the DB
lookup_coin_ids: List[bytes32] # The coin IDs that we need to look up to notify wallets of changes
class FullNode:
block_store: BlockStore
full_node_store: FullNodeStore
@ -272,18 +282,17 @@ class FullNode:
f"time taken: {int(time_taken)}s"
)
async with self._blockchain_lock_high_priority:
pending_tx = await self.mempool_manager.new_peak(self.blockchain.get_peak(), [])
pending_tx = await self.mempool_manager.new_peak(self.blockchain.get_peak(), None)
assert len(pending_tx) == 0 # no pending transactions when starting up
peak: Optional[BlockRecord] = self.blockchain.get_peak()
if peak is not None:
full_peak = await self.blockchain.get_full_peak()
mempool_new_peak_result, fns_peak_result = await self.peak_post_processing(
full_peak, peak, max(peak.height - 1, 0), None, []
)
await self.peak_post_processing_2(
full_peak, peak, max(peak.height - 1, 0), None, ([], {}), mempool_new_peak_result, fns_peak_result
state_change_summary = StateChangeSummary(peak, max(peak.height - 1, 0), [], [], [])
ppp_result: PeakPostProcessingResult = await self.peak_post_processing(
full_peak, state_change_summary, None
)
await self.peak_post_processing_2(full_peak, None, state_change_summary, ppp_result)
if self.config["send_uncompact_interval"] != 0:
sanitize_weight_proof_only = False
if "sanitize_weight_proof_only" in self.config:
@ -322,7 +331,7 @@ class FullNode:
try:
while not self._shut_down:
# We use a semaphore to make sure we don't send more than 200 concurrent calls of respond_transaction.
# However doing them one at a time would be slow, because they get sent to other processes.
# However, doing them one at a time would be slow, because they get sent to other processes.
await self.respond_transaction_semaphore.acquire()
item: TransactionQueueEntry = (await self.transaction_queue.get())[1]
asyncio.create_task(self._handle_one_transaction(item))
@ -437,27 +446,25 @@ class FullNode:
if not response:
raise ValueError(f"Error short batch syncing, invalid/no response for {height}-{end_height}")
async with self._blockchain_lock_high_priority:
success, advanced_peak, fork_height, coin_changes = await self.receive_block_batch(
response.blocks, peer, None
)
state_change_summary: Optional[StateChangeSummary]
success, state_change_summary = await self.receive_block_batch(response.blocks, peer, None)
if not success:
raise ValueError(f"Error short batch syncing, failed to validate blocks {height}-{end_height}")
if advanced_peak:
peak = self.blockchain.get_peak()
if state_change_summary is not None:
try:
peak_fb: Optional[FullBlock] = await self.blockchain.get_full_peak()
assert peak is not None and peak_fb is not None and fork_height is not None
mempool_new_peak_result, fns_peak_result = await self.peak_post_processing(
peak_fb, peak, fork_height, peer, coin_changes[0]
assert peak_fb is not None
ppp_result: PeakPostProcessingResult = await self.peak_post_processing(
peak_fb,
state_change_summary,
peer,
)
await self.peak_post_processing_2(
peak_fb, peak, fork_height, peer, coin_changes, mempool_new_peak_result, fns_peak_result
)
except asyncio.CancelledError:
# Still do post processing after cancel
await self.peak_post_processing_2(peak_fb, peer, state_change_summary, ppp_result)
except Exception:
# Still do post processing after cancel (or exception)
peak_fb = await self.blockchain.get_full_peak()
assert peak is not None and peak_fb is not None and fork_height is not None
await self.peak_post_processing(peak_fb, peak, fork_height, peer, coin_changes[0])
assert peak_fb is not None
await self.peak_post_processing(peak_fb, state_change_summary, peer)
raise
finally:
self.log.info(f"Added blocks {height}-{end_height}")
@ -942,7 +949,7 @@ class FullNode:
)
batch_size = self.constants.MAX_BLOCK_COUNT_PER_REQUESTS
async def fetch_block_batches(batch_queue, peers_with_peak: List[ws.WSChiaConnection]):
async def fetch_block_batches(batch_queue: asyncio.Queue, peers_with_peak: List[ws.WSChiaConnection]):
try:
for start_height in range(fork_point_height, target_peak_sb_height, batch_size):
end_height = min(target_peak_sb_height, start_height + batch_size)
@ -973,17 +980,17 @@ class FullNode:
# finished signal with None
await batch_queue.put(None)
async def validate_block_batches(batch_queue):
async def validate_block_batches(inner_batch_queue: asyncio.Queue):
advanced_peak = False
while True:
res = await batch_queue.get()
res = await inner_batch_queue.get()
if res is None:
self.log.debug("done fetching blocks")
return
peer, blocks = res
start_height = blocks[0].height
end_height = blocks[-1].height
success, advanced_peak, fork_height, coin_states = await self.receive_block_batch(
success, state_change_summary = await self.receive_block_batch(
blocks, peer, None if advanced_peak else uint32(fork_point_height), summaries
)
if success is False:
@ -992,9 +999,16 @@ class FullNode:
await peer.close(600)
raise ValueError(f"Failed to validate block batch {start_height} to {end_height}")
self.log.info(f"Added blocks {start_height} to {end_height}")
peak = self.blockchain.get_peak()
if len(coin_states) > 0 and fork_height is not None:
await self.update_wallets(peak.height, fork_height, peak.header_hash, coin_states)
peak: Optional[BlockRecord] = self.blockchain.get_peak()
if state_change_summary is not None:
advanced_peak = True
assert peak is not None
# Hints must be added to the DB. The other post-processing tasks are not required when syncing
hints_to_add, lookup_coin_ids = get_hints_and_subscription_coin_ids(
state_change_summary, self.coin_subscriptions, self.ph_subscriptions
)
await self.hint_store.add_hints(hints_to_add)
await self.update_wallets(state_change_summary, hints_to_add, lookup_coin_ids)
await self.send_peak_to_wallets()
self.blockchain.clean_block_record(end_height - self.constants.BLOCKS_CACHE_SIZE)
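fetch_block_batches and validate_block_batches above form a producer/consumer pair over an asyncio.Queue, using None as the end-of-stream sentinel. A minimal self-contained sketch of that pattern (toy integer payloads, not real blocks):

import asyncio
from typing import List, Optional

async def produce(queue: "asyncio.Queue[Optional[List[int]]]") -> None:
    for start in range(0, 96, 32):
        await queue.put(list(range(start, start + 32)))  # one "batch" per put
    await queue.put(None)  # finished signal, as in fetch_block_batches

async def consume(queue: "asyncio.Queue[Optional[List[int]]]") -> None:
    while True:
        batch = await queue.get()
        if batch is None:  # producer is done; exit as validate_block_batches does
            return
        print(f"validated batch {batch[0]}..{batch[-1]}")

async def main() -> None:
    queue: "asyncio.Queue[Optional[List[int]]]" = asyncio.Queue(maxsize=10)
    await asyncio.gather(produce(queue), consume(queue))

asyncio.run(main())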
@ -1029,51 +1043,47 @@ class FullNode:
async def update_wallets(
self,
height: uint32,
fork_height: uint32,
peak_hash: bytes32,
state_update: Tuple[List[CoinRecord], Dict[bytes, Dict[bytes32, CoinRecord]]],
):
state_change_summary: StateChangeSummary,
hints: List[Tuple[bytes32, bytes]],
lookup_coin_ids: List[bytes32],
) -> None:
# Looks up coin records in DB for the coins that wallets are interested in
new_states: List[CoinRecord] = await self.coin_store.get_coin_records(list(lookup_coin_ids))
# Re-arrange into a map, and filter out any hint that is not the size of a puzzle hash (32 bytes)
coin_id_to_ph_hint: Dict[bytes32, bytes32] = {
coin_id: bytes32(hint) for coin_id, hint in hints if len(hint) == 32
}
changes_for_peer: Dict[bytes32, Set[CoinState]] = {}
for coin_record in state_change_summary.rolled_back_records + [s for s in new_states if s is not None]:
cr_name: bytes32 = coin_record.name
for peer in self.coin_subscriptions.get(cr_name, []):
if peer not in changes_for_peer:
changes_for_peer[peer] = set()
changes_for_peer[peer].add(coin_record.coin_state)
states, hint_state = state_update
for peer in self.ph_subscriptions.get(coin_record.coin.puzzle_hash, []):
if peer not in changes_for_peer:
changes_for_peer[peer] = set()
changes_for_peer[peer].add(coin_record.coin_state)
for coin_record in states:
if coin_record.name in self.coin_subscriptions:
subscribed_peers = self.coin_subscriptions[coin_record.name]
for peer in subscribed_peers:
if cr_name in coin_id_to_ph_hint:
for peer in self.ph_subscriptions.get(coin_id_to_ph_hint[cr_name], []):
if peer not in changes_for_peer:
changes_for_peer[peer] = set()
changes_for_peer[peer].add(coin_record.coin_state)
if coin_record.coin.puzzle_hash in self.ph_subscriptions:
subscribed_peers = self.ph_subscriptions[coin_record.coin.puzzle_hash]
for peer in subscribed_peers:
if peer not in changes_for_peer:
changes_for_peer[peer] = set()
changes_for_peer[peer].add(coin_record.coin_state)
# This is just a verification that the assumptions justifying the ignore below
# are valid.
hint: bytes
for hint, records in hint_state.items():
# While `hint` is typed as a `bytes`, and this is locally verified
# immediately above, if it has length 32 then it might match an entry in
# `self.ph_subscriptions`. It is unclear if there is a more proper means
# of handling this situation.
subscribed_peers = self.ph_subscriptions.get(hint) # type: ignore[call-overload]
if subscribed_peers is not None:
for peer in subscribed_peers:
if peer not in changes_for_peer:
changes_for_peer[peer] = set()
for record in records.values():
changes_for_peer[peer].add(record.coin_state)
for peer, changes in changes_for_peer.items():
if peer not in self.server.all_connections:
continue
ws_peer: ws.WSChiaConnection = self.server.all_connections[peer]
state = CoinStateUpdate(height, fork_height, peak_hash, list(changes))
state = CoinStateUpdate(
state_change_summary.peak.height,
state_change_summary.fork_height,
state_change_summary.peak.header_hash,
list(changes),
)
msg = make_msg(ProtocolMessageTypes.coin_state_update, state)
await ws_peer.send_message(msg)
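The fan-out above repeats the same "if peer not in changes_for_peer" membership check for each subscription source; dict.setdefault collapses that pattern. A hedged sketch of the same bookkeeping with toy string types (not a drop-in replacement; real code keys on bytes32 ids and collects CoinState objects):

from typing import Dict, List, Set

def fan_out(
    updates: List[str],
    subscriptions: Dict[str, List[str]],  # update name -> subscribed peer ids
) -> Dict[str, Set[str]]:
    changes_for_peer: Dict[str, Set[str]] = {}
    for update in updates:
        for peer in subscriptions.get(update, []):
            changes_for_peer.setdefault(peer, set()).add(update)
    return changes_for_peer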
@ -1083,9 +1093,9 @@ class FullNode:
peer: ws.WSChiaConnection,
fork_point: Optional[uint32],
wp_summaries: Optional[List[SubEpochSummary]] = None,
) -> Tuple[bool, bool, Optional[uint32], Tuple[List[CoinRecord], Dict[bytes, Dict[bytes32, CoinRecord]]]]:
advanced_peak = False
fork_height: Optional[uint32] = uint32(0)
) -> Tuple[bool, Optional[StateChangeSummary]]:
# Precondition: all blocks must be contiguous, with the block at index i being the parent of the block at index i+1
# Returns a bool for success, as well as a StateChangeSummary if the peak was advanced
blocks_to_validate: List[FullBlock] = []
for i, block in enumerate(all_blocks):
@ -1093,7 +1103,7 @@ class FullNode:
blocks_to_validate = all_blocks[i:]
break
if len(blocks_to_validate) == 0:
return True, False, fork_height, ([], {})
return True, None
# Validates signatures in multiprocessing since they take a while, and we don't have cached transactions
# for these blocks (unlike during normal operation where we validate one at a time)
@ -1114,45 +1124,48 @@ class FullNode:
self.log.error(
f"Invalid block from peer: {peer.get_peer_logging()} {Err(pre_validation_results[i].error)}"
)
return False, advanced_peak, fork_height, ([], {})
return False, None
# Dicts because deduping
all_coin_changes: Dict[bytes32, CoinRecord] = {}
all_hint_changes: Dict[bytes, Dict[bytes32, CoinRecord]] = {}
agg_state_change_summary: Optional[StateChangeSummary] = None
for i, block in enumerate(blocks_to_validate):
assert pre_validation_results[i].required_iters is not None
result, error, fork_height, coin_changes = await self.blockchain.receive_block(
state_change_summary: Optional[StateChangeSummary]
advanced_peak = agg_state_change_summary is not None
result, error, state_change_summary = await self.blockchain.receive_block(
block, pre_validation_results[i], None if advanced_peak else fork_point
)
coin_record_list, hint_records = coin_changes
# Update all changes
for record in coin_record_list:
all_coin_changes[record.name] = record
for hint, list_of_records in hint_records.items():
if hint not in all_hint_changes:
all_hint_changes[hint] = {}
for record in list_of_records.values():
all_hint_changes[hint][record.name] = record
if result == ReceiveBlockResult.NEW_PEAK:
advanced_peak = True
assert state_change_summary is not None
# Since all blocks are contiguous, we can simply append the rollback changes and npc results
if agg_state_change_summary is None:
agg_state_change_summary = state_change_summary
else:
# Keeps the old, original fork_height, since each subsequent block reports a fork height of its own height - 1
# Groups up all state changes into one
agg_state_change_summary = StateChangeSummary(
state_change_summary.peak,
agg_state_change_summary.fork_height,
agg_state_change_summary.rolled_back_records + state_change_summary.rolled_back_records,
agg_state_change_summary.new_npc_results + state_change_summary.new_npc_results,
agg_state_change_summary.new_rewards + state_change_summary.new_rewards,
)
elif result == ReceiveBlockResult.INVALID_BLOCK or result == ReceiveBlockResult.DISCONNECTED_BLOCK:
if error is not None:
self.log.error(f"Error: {error}, Invalid block from peer: {peer.get_peer_logging()} ")
return False, advanced_peak, fork_height, ([], {})
return False, agg_state_change_summary
block_record = self.blockchain.block_record(block.header_hash)
if block_record.sub_epoch_summary_included is not None:
if self.weight_proof_handler is not None:
await self.weight_proof_handler.create_prev_sub_epoch_segments()
if advanced_peak:
if agg_state_change_summary is not None:
self._state_changed("new_peak")
self.log.debug(
f"Total time for {len(blocks_to_validate)} blocks: {time.time() - pre_validate_start}, "
f"advanced: {advanced_peak}"
f"advanced: True"
)
return True, advanced_peak, fork_height, (list(all_coin_changes.values()), all_hint_changes)
return True, agg_state_change_summary
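As a worked example of the aggregation rule above (field order taken from the StateChangeSummary constructions in this diff; the dataclass below is a toy stand-in, values illustrative): the newest peak wins, the original fork height is kept, and the per-block lists are concatenated in order.

import dataclasses
from typing import List

@dataclasses.dataclass
class SummarySketch:  # toy stand-in for StateChangeSummary's five fields
    peak: str
    fork_height: int
    rolled_back_records: List[str]
    new_npc_results: List[str]
    new_rewards: List[str]

s1 = SummarySketch("block_a", 10, ["rb1"], ["npc_a"], ["rw_a"])
s2 = SummarySketch("block_b", 11, [], ["npc_b"], ["rw_b"])

# Mirrors the aggregation in receive_block_batch
agg = SummarySketch(
    s2.peak,
    s1.fork_height,
    s1.rolled_back_records + s2.rolled_back_records,
    s1.new_npc_results + s2.new_npc_results,
    s1.new_rewards + s2.new_rewards,
)
assert agg.peak == "block_b" and agg.fork_height == 10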
async def _finish_sync(self):
"""
@ -1172,13 +1185,11 @@ class FullNode:
peak_fb: FullBlock = await self.blockchain.get_full_peak()
if peak is not None:
mempool_new_peak_result, fns_peak_result = await self.peak_post_processing(
peak_fb, peak, max(peak.height - 1, 0), None, []
)
await self.peak_post_processing_2(
peak_fb, peak, max(peak.height - 1, 0), None, ([], {}), mempool_new_peak_result, fns_peak_result
state_change_summary = StateChangeSummary(peak, max(peak.height - 1, 0), [], [], [])
ppp_result: PeakPostProcessingResult = await self.peak_post_processing(
peak_fb, state_change_summary, None
)
await self.peak_post_processing_2(peak_fb, None, state_change_summary, ppp_result)
if peak is not None and self.weight_proof_handler is not None:
await self.weight_proof_handler.get_proof_of_weight(peak.header_hash)
@ -1261,22 +1272,22 @@ class FullNode:
async def peak_post_processing(
self,
block: FullBlock,
record: BlockRecord,
fork_height: uint32,
state_change_summary: StateChangeSummary,
peer: Optional[ws.WSChiaConnection],
coin_changes: List[CoinRecord],
):
) -> PeakPostProcessingResult:
"""
Must be called under self.blockchain.lock. This updates the internal state of the full node with the
latest peak information. It also notifies peers about the new peak.
"""
record = state_change_summary.peak
difficulty = self.blockchain.get_next_difficulty(record.header_hash, False)
sub_slot_iters = self.blockchain.get_next_slot_iters(record.header_hash, False)
self.log.info(
f"🌱 Updated peak to height {record.height}, weight {record.weight}, "
f"hh {record.header_hash}, "
f"forked at {fork_height}, rh: {record.reward_infusion_new_challenge}, "
f"forked at {state_change_summary.fork_height}, rh: {record.reward_infusion_new_challenge}, "
f"total iters: {record.total_iters}, "
f"overflow: {record.overflow}, "
f"deficit: {record.deficit}, "
@ -1288,6 +1299,17 @@ class FullNode:
f"{len(block.transactions_generator_ref_list) if block.transactions_generator else 'No tx'}"
)
if (
self.full_node_store.previous_generator is not None
and state_change_summary.fork_height < self.full_node_store.previous_generator.block_height
):
self.full_node_store.previous_generator = None
hints_to_add, lookup_coin_ids = get_hints_and_subscription_coin_ids(
state_change_summary, self.coin_subscriptions, self.ph_subscriptions
)
await self.hint_store.add_hints(hints_to_add)
sub_slots = await self.blockchain.get_sp_and_ip_sub_slots(record.header_hash)
assert sub_slots is not None
@ -1295,9 +1317,9 @@ class FullNode:
self.blockchain.clean_block_records()
fork_block: Optional[BlockRecord] = None
if fork_height != block.height - 1 and block.height != 0:
if state_change_summary.fork_height != block.height - 1 and block.height != 0:
# This is a reorg
fork_hash: Optional[bytes32] = self.blockchain.height_to_hash(fork_height)
fork_hash: Optional[bytes32] = self.blockchain.height_to_hash(state_change_summary.fork_height)
assert fork_hash is not None
fork_block = self.blockchain.block_record(fork_hash)
@ -1340,8 +1362,9 @@ class FullNode:
)
# Update the mempool (returns successful pending transactions added to the mempool)
new_npc_results: List[NPCResult] = state_change_summary.new_npc_results
mempool_new_peak_result: List[Tuple[SpendBundle, NPCResult, bytes32]] = await self.mempool_manager.new_peak(
self.blockchain.get_peak(), coin_changes
self.blockchain.get_peak(), new_npc_results[-1] if len(new_npc_results) > 0 else None
)
# Check if we detected a spent transaction, to load up our generator cache
@ -1350,23 +1373,22 @@ class FullNode:
if generator_arg:
self.log.info(f"Saving previous generator for height {block.height}")
self.full_node_store.previous_generator = generator_arg
return mempool_new_peak_result, fns_peak_result
return PeakPostProcessingResult(mempool_new_peak_result, fns_peak_result, hints_to_add, lookup_coin_ids)
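Note that MempoolManager.new_peak above now receives a single Optional[NPCResult] instead of a flat list of coin changes: blocks are processed in height order, so the last entry of new_npc_results belongs to the block that became the peak. A runnable toy illustration of that selection:

from typing import List, Optional

# Toy stand-ins: strings in place of NPCResult objects
new_npc_results: List[str] = ["npc_h100", "npc_h101", "npc_h102"]
last_npc_result: Optional[str] = new_npc_results[-1] if len(new_npc_results) > 0 else None
assert last_npc_result == "npc_h102"  # the result for the new peak block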
async def peak_post_processing_2(
self,
block: FullBlock,
record: BlockRecord,
fork_height: uint32,
peer: Optional[ws.WSChiaConnection],
coin_changes: Tuple[List[CoinRecord], Dict[bytes, Dict[bytes32, CoinRecord]]],
mempool_peak_result: List[Tuple[SpendBundle, NPCResult, bytes32]],
fns_peak_result: FullNodeStorePeakResult,
state_change_summary: StateChangeSummary,
ppp_result: PeakPostProcessingResult,
):
"""
Does NOT need to be called under the blockchain lock. Handles other parts of post processing, such as
communicating with peers.
"""
for bundle, result, spend_name in mempool_peak_result:
record = state_change_summary.peak
for bundle, result, spend_name in ppp_result.mempool_peak_result:
self.log.debug(f"Added transaction to mempool: {spend_name}")
mempool_item = self.mempool_manager.get_mempool_item(spend_name)
assert mempool_item is not None
@ -1382,12 +1404,12 @@ class FullNode:
await self.server.send_to_all([msg], NodeType.FULL_NODE)
# If there were pending end of slots that happen after this peak, broadcast them if they are added
if fns_peak_result.added_eos is not None:
if ppp_result.fns_peak_result.added_eos is not None:
broadcast = full_node_protocol.NewSignagePointOrEndOfSubSlot(
fns_peak_result.added_eos.challenge_chain.challenge_chain_end_of_slot_vdf.challenge,
fns_peak_result.added_eos.challenge_chain.get_hash(),
ppp_result.fns_peak_result.added_eos.challenge_chain.challenge_chain_end_of_slot_vdf.challenge,
ppp_result.fns_peak_result.added_eos.challenge_chain.get_hash(),
uint8(0),
fns_peak_result.added_eos.reward_chain.end_of_slot_vdf.challenge,
ppp_result.fns_peak_result.added_eos.reward_chain.end_of_slot_vdf.challenge,
)
msg = make_msg(ProtocolMessageTypes.new_signage_point_or_end_of_sub_slot, broadcast)
await self.server.send_to_all([msg], NodeType.FULL_NODE)
@ -1409,7 +1431,7 @@ class FullNode:
record.header_hash,
record.height,
record.weight,
fork_height,
state_change_summary.fork_height,
block.reward_chain_block.get_unfinished().get_hash(),
),
)
@ -1425,10 +1447,10 @@ class FullNode:
record.header_hash,
record.height,
record.weight,
fork_height,
state_change_summary.fork_height,
),
)
await self.update_wallets(record.height, fork_height, record.header_hash, coin_changes)
await self.update_wallets(state_change_summary, ppp_result.hints, ppp_result.lookup_coin_ids)
await self.server.send_to_all([msg], NodeType.WALLET)
self._state_changed("new_peak")
@ -1501,8 +1523,8 @@ class FullNode:
)
# This recursion ends here, we cannot recurse again because transactions_generator is not None
return await self.respond_block(block_response, peer)
coin_changes: Tuple[List[CoinRecord], Dict[bytes, Dict[bytes32, CoinRecord]]] = ([], {})
mempool_new_peak_result, fns_peak_result = None, None
state_change_summary: Optional[StateChangeSummary] = None
ppp_result: Optional[PeakPostProcessingResult] = None
async with self._blockchain_lock_high_priority:
# After acquiring the lock, check again, because another asyncio thread might have added it
if self.blockchain.contains_block(header_hash):
@ -1527,7 +1549,6 @@ class FullNode:
if Err(pre_validation_results[0].error) == Err.INVALID_PREV_BLOCK_HASH:
added = ReceiveBlockResult.DISCONNECTED_BLOCK
error_code: Optional[Err] = Err.INVALID_PREV_BLOCK_HASH
fork_height: Optional[uint32] = None
else:
raise ValueError(
f"Failed to validate block {header_hash} height "
@ -1538,24 +1559,15 @@ class FullNode:
pre_validation_results[0] if pre_validation_result is None else pre_validation_result
)
assert result_to_validate.required_iters == pre_validation_results[0].required_iters
added, error_code, fork_height, coin_changes = await self.blockchain.receive_block(
(added, error_code, state_change_summary) = await self.blockchain.receive_block(
block, result_to_validate, None
)
if (
self.full_node_store.previous_generator is not None
and fork_height is not None
and fork_height < self.full_node_store.previous_generator.block_height
):
self.full_node_store.previous_generator = None
if added == ReceiveBlockResult.ALREADY_HAVE_BLOCK:
return None
elif added == ReceiveBlockResult.INVALID_BLOCK:
assert error_code is not None
self.log.error(f"Block {header_hash} at height {block.height} is invalid with code {error_code}.")
raise ConsensusError(error_code, [header_hash])
elif added == ReceiveBlockResult.DISCONNECTED_BLOCK:
self.log.info(f"Disconnected block {header_hash} at height {block.height}")
if raise_on_disconnected:
@ -1563,11 +1575,8 @@ class FullNode:
return None
elif added == ReceiveBlockResult.NEW_PEAK:
# Only propagate blocks which extend the blockchain (becomes one of the heads)
new_peak: Optional[BlockRecord] = self.blockchain.get_peak()
assert new_peak is not None and fork_height is not None
mempool_new_peak_result, fns_peak_result = await self.peak_post_processing(
block, new_peak, fork_height, peer, coin_changes[0]
)
assert state_change_summary is not None
ppp_result = await self.peak_post_processing(block, state_change_summary, peer)
elif added == ReceiveBlockResult.ADDED_AS_ORPHAN:
self.log.info(
@ -1579,22 +1588,16 @@ class FullNode:
except asyncio.CancelledError:
# We need to make sure to always call this method even when we get a cancel exception, to make sure
# the node stays in sync
new_peak = self.blockchain.get_peak()
if added == ReceiveBlockResult.NEW_PEAK:
assert new_peak is not None
assert fork_height is not None
await self.peak_post_processing(block, new_peak, fork_height, peer, coin_changes[0])
assert state_change_summary is not None
await self.peak_post_processing(block, state_change_summary, peer)
raise
validation_time = time.time() - validation_start
if mempool_new_peak_result is not None:
assert new_peak is not None
assert fork_height is not None
assert fns_peak_result is not None
await self.peak_post_processing_2(
block, new_peak, fork_height, peer, coin_changes, mempool_new_peak_result, fns_peak_result
)
if ppp_result is not None:
assert state_change_summary is not None
await self.peak_post_processing_2(block, peer, state_change_summary, ppp_result)
percent_full_str = (
(
@ -1629,6 +1632,8 @@ class FullNode:
"k_size": block.reward_chain_block.proof_of_space.size,
"header_hash": block.header_hash,
"height": block.height,
"validation_time": validation_time,
"pre_validation_time": pre_validation_time,
}
if block.transactions_info is not None:

View File

@ -3,7 +3,7 @@ import dataclasses
import time
import traceback
from secrets import token_bytes
from typing import Callable, Dict, List, Optional, Tuple, Set
from typing import Dict, List, Optional, Tuple, Set
from blspy import AugSchemeMPL, G2Element
from chiabip158 import PyBIP158
@ -27,7 +27,7 @@ from chia.protocols.wallet_protocol import (
RespondSESInfo,
)
from chia.server.outbound_message import Message, make_msg
from chia.types.blockchain_format.coin import Coin, hash_coin_list
from chia.types.blockchain_format.coin import Coin, hash_coin_ids
from chia.types.blockchain_format.pool_target import PoolTarget
from chia.types.blockchain_format.program import Program
from chia.types.blockchain_format.sized_bytes import bytes32
@ -54,9 +54,6 @@ class FullNodeAPI:
def __init__(self, full_node) -> None:
self.full_node = full_node
def _set_state_changed_callback(self, callback: Callable):
self.full_node.state_changed_callback = callback
@property
def server(self):
return self.full_node.server
@ -784,7 +781,7 @@ class FullNodeAPI:
def get_pool_sig(_1, _2) -> Optional[G2Element]:
return request.pool_signature
prev_b: Optional[BlockRecord] = self.full_node.blockchain.get_peak()
prev_b: Optional[BlockRecord] = peak
# Finds the previous block from the signage point, ensuring that the reward chain VDF is correct
if prev_b is not None:
@ -1157,14 +1154,14 @@ class FullNodeAPI:
# Addition Merkle set contains puzzlehash and hash of all coins with that puzzlehash
for puzzle, coins in puzzlehash_coins_map.items():
addition_merkle_set.add_already_hashed(puzzle)
addition_merkle_set.add_already_hashed(hash_coin_list(coins))
addition_merkle_set.add_already_hashed(hash_coin_ids([c.name() for c in coins]))
assert addition_merkle_set.get_root() == block.foliage_transaction_block.additions_root
for puzzle_hash in request.puzzle_hashes:
result, proof = addition_merkle_set.is_included_already_hashed(puzzle_hash)
if puzzle_hash in puzzlehash_coins_map:
coins_map.append((puzzle_hash, puzzlehash_coins_map[puzzle_hash]))
hash_coin_str = hash_coin_list(puzzlehash_coins_map[puzzle_hash])
hash_coin_str = hash_coin_ids([c.name() for c in puzzlehash_coins_map[puzzle_hash]])
result_2, proof_2 = addition_merkle_set.is_included_already_hashed(hash_coin_str)
assert result
assert result_2
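The switch from hash_coin_list to hash_coin_ids moves the coin.name() computation to the caller, as seen above. For intuition, a hedged sketch of a coin-id digest, assuming the 32-byte ids are ordered deterministically and their concatenation hashed once with sha256; the authoritative rules live in chia/types/blockchain_format/coin.py, and the function below is illustrative only:

import hashlib
from typing import List

def hash_coin_ids_sketch(coin_ids: List[bytes]) -> bytes:
    # Assumed scheme: sort the ids, concatenate, hash once; the real
    # hash_coin_ids defines the exact ordering and any special cases.
    buffer = bytearray()
    for coin_id in sorted(coin_ids, reverse=True):
        buffer.extend(coin_id)
    return hashlib.sha256(bytes(buffer)).digest()

# Example: digest over two hypothetical coin ids
print(hash_coin_ids_sketch([b"\x11" * 32, b"\x22" * 32]).hex())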