Merge commit '2acf03e850fb883dc6ca5a554f5f827097157ac1' (latest main) into atari-merge_main_2acf03e850fb883dc6ca5a554f5f827097157ac1

This commit is contained in:
Kyle Altendorf 2022-01-19 10:37:04 -05:00
commit 4453f32363
No known key found for this signature in database
GPG Key ID: 5715D880FF005192
28 changed files with 127 additions and 88 deletions

View File

@ -17,7 +17,7 @@ jobs:
build:
name: MacOS pools Tests
runs-on: ${{ matrix.os }}
timeout-minutes: 30
timeout-minutes: 45
strategy:
fail-fast: false
max-parallel: 4

View File

@ -17,7 +17,7 @@ jobs:
build:
name: Ubuntu pools Test
runs-on: ${{ matrix.os }}
timeout-minutes: 30
timeout-minutes: 45
strategy:
fail-fast: false
max-parallel: 4

3
.gitignore vendored
View File

@ -98,3 +98,6 @@ chia-blockchain-gui/src/locales/_build
build_scripts\win_build
build_scripts/win_build
win_code_sign_cert.p12
# chia-blockchain wheel build folder
build/

View File

@ -186,7 +186,7 @@
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2021 Chia Network
Copyright 2022 Chia Network
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.

View File

@ -67,4 +67,4 @@ async def setup_db(name: str, db_version: int) -> DBWrapper:
await connection.execute("pragma journal_mode=wal")
await connection.execute("pragma synchronous=full")
return DBWrapper(connection, False, db_version)
return DBWrapper(connection, db_version)

View File

@ -16,12 +16,8 @@ rm chia-blockchain-gui/temp.json || true
( cd "$PWD/chia-blockchain-gui" && git checkout HEAD -- package-lock.json ) || true
cd "$PWD" || true
# Do our best to get rid of any globally installed notarize-cli versions so the version in the current build script is
# installed without conflicting with the other version that might be installed
PATH=$(brew --prefix node@16)/bin:$PATH || true
export PATH
npm uninstall -g notarize-cli || true
npm uninstall -g @chia-network/notarize-cli || true
npm uninstall -g electron-installer-dmg || true
npm uninstall -g electron-packager || true
npm uninstall -g electron/electron-osx-sign || true
# Clean up old globally installed node_modules that might conflict with the current build
rm -rf /opt/homebrew/lib/node_modules || true
# Clean up any installed versions of node so we can start fresh
brew list | grep "^node\@\|^node$" | xargs -L1 brew uninstall || true

View File

@ -93,9 +93,9 @@ def configure(
if testnet == "true" or testnet == "t":
print("Setting Testnet")
testnet_port = "58444"
testnet_introducer = "beta1_introducer.chia.net"
testnet_dns_introducer = "dns-introducer-testnet7.chia.net"
testnet = "testnet7"
testnet_introducer = "introducer-testnet10.chia.net"
testnet_dns_introducer = "dns-introducer-testnet10.chia.net"
testnet = "testnet10"
config["full_node"]["port"] = int(testnet_port)
config["full_node"]["introducer_peer"]["port"] = int(testnet_port)
config["farmer"]["full_node_peer"]["port"] = int(testnet_port)

View File

@ -96,7 +96,7 @@ def configure(
if testnet == "true" or testnet == "t":
print("Updating Chia Seeder to testnet settings")
port = 58444
network = "testnet7"
network = "testnet10"
bootstrap = ["testnet-node.chia.net"]
config["seeder"]["port"] = port

View File

@ -661,12 +661,13 @@ class Blockchain(BlockchainInterface):
unfinished_block,
bytes(generator),
)
error, npc_result_bytes = await task
if error is not None:
raise ConsensusError(error)
npc_result_bytes = await task
if npc_result_bytes is None:
raise ConsensusError(Err.UNKNOWN)
return NPCResult.from_bytes(npc_result_bytes)
ret = NPCResult.from_bytes(npc_result_bytes)
if ret.error is not None:
raise ConsensusError(ret.error)
return ret
def contains_block(self, header_hash: bytes32) -> bool:
"""

View File

@ -329,7 +329,7 @@ def _run_generator(
constants_dict: bytes,
unfinished_block_bytes: bytes,
block_generator_bytes: bytes,
) -> Tuple[Optional[Err], Optional[bytes]]:
) -> Optional[bytes]:
"""
Runs the CLVM generator from bytes inputs. This is meant to be called under a ProcessPoolExecutor, in order to
validate the heavy parts of a block (clvm program) in a different process.
@ -346,11 +346,8 @@ def _run_generator(
cost_per_byte=constants.COST_PER_BYTE,
mempool_mode=False,
)
if npc_result.error is not None:
return Err(npc_result.error), None
return bytes(npc_result)
except ValidationError as e:
return e.code, None
return bytes(NPCResult(uint16(e.code.value), [], uint64(0)))
except Exception:
return Err.UNKNOWN, None
return None, bytes(npc_result)
return bytes(NPCResult(uint16(Err.UNKNOWN.value), [], uint64(0)))

View File

@ -97,15 +97,7 @@ class BlockStore:
await self.db.execute("CREATE INDEX IF NOT EXISTS height on block_records(height)")
if self.db_wrapper.allow_upgrades:
await self.db.execute("DROP INDEX IF EXISTS hh")
await self.db.execute("DROP INDEX IF EXISTS is_block")
await self.db.execute("DROP INDEX IF EXISTS peak")
await self.db.execute(
"CREATE INDEX IF NOT EXISTS is_peak_eq_1_idx on block_records(is_peak) where is_peak = 1"
)
else:
await self.db.execute("CREATE INDEX IF NOT EXISTS peak on block_records(is_peak) where is_peak = 1")
await self.db.execute("CREATE INDEX IF NOT EXISTS peak on block_records(is_peak) where is_peak = 1")
await self.db.commit()
self.block_cache = LRUCache(1000)

View File

@ -74,9 +74,6 @@ class CoinStore:
await self.coin_record_db.execute("CREATE INDEX IF NOT EXISTS coin_spent_index on coin_record(spent_index)")
if self.db_wrapper.allow_upgrades:
await self.coin_record_db.execute("DROP INDEX IF EXISTS coin_spent")
await self.coin_record_db.execute("CREATE INDEX IF NOT EXISTS coin_puzzle_hash on coin_record(puzzle_hash)")
await self.coin_record_db.execute("CREATE INDEX IF NOT EXISTS coin_parent_index on coin_record(coin_parent)")
@ -85,6 +82,13 @@ class CoinStore:
self.coin_record_cache = LRUCache(cache_size)
return self
async def num_unspent(self) -> int:
async with self.coin_record_db.execute("SELECT COUNT(*) FROM coin_record WHERE spent_index=0") as cursor:
row = await cursor.fetchone()
if row is not None:
return row[0]
return 0
def maybe_from_hex(self, field: Any) -> bytes:
if self.db_wrapper.db_version == 2:
return field

View File

@ -176,9 +176,7 @@ class FullNode:
db_version: int = await lookup_db_version(self.connection)
self.db_wrapper = DBWrapper(
self.connection, self.config.get("allow_database_upgrades", False), db_version=db_version
)
self.db_wrapper = DBWrapper(self.connection, db_version=db_version)
self.block_store = await BlockStore.create(self.db_wrapper)
self.sync_store = await SyncStore.create()
self.hint_store = await HintStore.create(self.db_wrapper)
@ -214,6 +212,13 @@ class FullNode:
time_taken = time.time() - start_time
if self.blockchain.get_peak() is None:
self.log.info(f"Initialized with empty blockchain time taken: {int(time_taken)}s")
num_unspent = await self.coin_store.num_unspent()
if num_unspent > 0:
self.log.error(
f"Inconsistent blockchain DB file! Could not find peak block but found {num_unspent} coins! "
"This is a fatal error. The blockchain database may be corrupt"
)
raise RuntimeError("corrupt blockchain DB")
else:
self.log.info(
f"Blockchain initialized to peak {self.blockchain.get_peak().header_hash} height"

View File

@ -127,17 +127,6 @@ class HarvesterAPI:
)
continue
# Look up local_sk from plot to save locked memory
(
pool_public_key_or_puzzle_hash,
farmer_public_key,
local_master_sk,
) = parse_plot_info(plot_info.prover.get_memo())
local_sk = master_sk_to_local_sk(local_master_sk)
include_taproot = plot_info.pool_contract_puzzle_hash is not None
plot_public_key = ProofOfSpace.generate_plot_public_key(
local_sk.get_g1(), farmer_public_key, include_taproot
)
responses.append(
(
quality_str,
@ -145,7 +134,7 @@ class HarvesterAPI:
sp_challenge_hash,
plot_info.pool_public_key,
plot_info.pool_contract_puzzle_hash,
plot_public_key,
plot_info.plot_public_key,
uint8(plot_info.prover.get_size()),
proof_xs,
),

View File

@ -352,8 +352,8 @@ class PoolWallet:
await self.update_pool_config(True)
p2_puzzle_hash: bytes32 = (await self.get_current_state()).p2_singleton_puzzle_hash
await self.wallet_state_manager.add_interested_puzzle_hash(p2_puzzle_hash, self.wallet_id, False)
await self.wallet_state_manager.add_new_wallet(self, self.wallet_info.id, create_puzzle_hashes=False)
await self.wallet_state_manager.add_interested_puzzle_hash(p2_puzzle_hash, self.wallet_id, False)
return self
@staticmethod

View File

@ -4,7 +4,6 @@ from pathlib import Path
from typing import Callable, Dict, List, Optional, Tuple, Set, Any
from blspy import PrivateKey, G1Element
from clvm_tools import binutils
from chia.consensus.block_rewards import calculate_base_farmer_reward
from chia.pools.pool_wallet import PoolWallet
@ -13,7 +12,6 @@ from chia.protocols.protocol_message_types import ProtocolMessageTypes
from chia.server.outbound_message import NodeType, make_msg
from chia.simulator.simulator_protocol import FarmNewBlockProtocol
from chia.types.announcement import Announcement
from chia.types.blockchain_format.program import Program
from chia.types.blockchain_format.coin import Coin
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.bech32m import decode_puzzle_hash, encode_puzzle_hash
@ -1185,8 +1183,10 @@ class WalletRpcApi:
coin_announcements = {
Announcement(
bytes32.from_hexstr(announcement["coin_id"]),
bytes(Program.to(binutils.assemble(announcement["message"]))),
hexstr_to_bytes(announcement["morph_bytes"]) if "morph_bytes" in announcement else None,
hexstr_to_bytes(announcement["message"]),
hexstr_to_bytes(announcement["morph_bytes"])
if "morph_bytes" in announcement and len(announcement["morph_bytes"]) > 0
else None,
)
for announcement in request["coin_announcements"]
}
@ -1200,8 +1200,10 @@ class WalletRpcApi:
puzzle_announcements = {
Announcement(
bytes32.from_hexstr(announcement["puzzle_hash"]),
bytes(Program.to(binutils.assemble(announcement["message"]))),
hexstr_to_bytes(announcement["morph_bytes"]) if "morph_bytes" in announcement else None,
hexstr_to_bytes(announcement["message"]),
hexstr_to_bytes(announcement["morph_bytes"])
if "morph_bytes" in announcement and len(announcement["morph_bytes"]) > 0
else None,
)
for announcement in request["puzzle_announcements"]
}

View File

@ -27,6 +27,7 @@ MAX_CONCURRENT_OUTBOUND_CONNECTIONS = 70
NETWORK_ID_DEFAULT_PORTS = {
"mainnet": 8444,
"testnet7": 58444,
"testnet10": 58444,
"testnet8": 58445,
}

View File

@ -11,12 +11,10 @@ class DBWrapper:
db: aiosqlite.Connection
lock: asyncio.Lock
allow_upgrades: bool
db_version: int
def __init__(self, connection: aiosqlite.Connection, allow_upgrades: bool = False, db_version: int = 1):
def __init__(self, connection: aiosqlite.Connection, db_version: int = 1):
self.db = connection
self.allow_upgrades = allow_upgrades
self.lock = asyncio.Lock()
self.db_version = db_version

View File

@ -67,6 +67,17 @@ network_overrides: &network_overrides
MEMPOOL_BLOCK_BUFFER: 50
EPOCH_BLOCKS: 768
DIFFICULTY_STARTING: 30
testnet10:
AGG_SIG_ME_ADDITIONAL_DATA: ae83525ba8d1dd3f09b277de18ca3e43fc0af20d20c4b3e92ef2a48bd291ccb2
DIFFICULTY_CONSTANT_FACTOR: 10052721566054
DIFFICULTY_STARTING: 30
EPOCH_BLOCKS: 768
GENESIS_CHALLENGE: ae83525ba8d1dd3f09b277de18ca3e43fc0af20d20c4b3e92ef2a48bd291ccb2
GENESIS_PRE_FARM_FARMER_PUZZLE_HASH: 3d8765d3a597ec1d99663f6c9816d915b9f68613ac94009884c4addaefcce6af
GENESIS_PRE_FARM_POOL_PUZZLE_HASH: d23da14695a188ae5708dd152263c4db883eb27edeb936178d4d988b8f3ce5fc
MEMPOOL_BLOCK_BUFFER: 10
MIN_PLOT_SIZE: 18
NETWORK_TYPE: 1
config:
mainnet:
address_prefix: "xch"
@ -85,6 +96,9 @@ network_overrides: &network_overrides
testnet7:
address_prefix: "txch"
default_full_node_port: 58444
testnet10:
address_prefix: "txch"
default_full_node_port: 58444
selected_network: &selected_network "mainnet"
ALERTS_URL: https://download.chia.net/notify/mainnet_alert.txt
@ -303,14 +317,6 @@ full_node:
# the particular system we're running on. Defaults to "full".
db_sync: "auto"
# when this is true, minor data base updates may be performed on startup.
# the database upgrades are minor, in that they typically remove unnecessary
# indices or re-build certain indices. The intention of these updates is to
# improve performance of the node at the expense of a one-time startup cost.
# also note that enabling this may prohibit down-grading the node to a
# previous version, or may trigger an index rebuild on startup on down-grades.
allow_database_upgrades: False
# Run multiple nodes with different databases by changing the database_path
database_path: db/blockchain_v1_CHALLENGE.sqlite
# peer_db_path is deprecated and has been replaced by peers_file_path
@ -446,6 +452,8 @@ wallet:
# see description for full_node.db_sync
db_sync: auto
connect_to_unknown_peers: True
# The minimum height that we care about for our transactions. Set to zero
if we are restoring from a private key and don't know the height.
starting_height: 0
@ -454,6 +462,9 @@ wallet:
initial_num_public_keys: 100
initial_num_public_keys_new_wallet: 5
dns_servers:
- "dns-introducer.chia.net"
full_node_peer:
host: *self_hostname
port: 8444

View File

@ -10,10 +10,10 @@ MARMOT = {
"symbol": "MRMT",
}
DUCK_SAUCE = {
STABLY_USDS = {
"asset_id": "6d95dae356e32a71db5ddcb42224754a02524c615c5fc35f568c2af04774e589",
"name": "Duck Sauce",
"symbol": "DSC",
"name": "Stably USD",
"symbol": "USDS",
}
CHIA_HOLIDAY_TOKEN = {
@ -25,6 +25,6 @@ CHIA_HOLIDAY_TOKEN = {
DEFAULT_CATS = {
SPACEBUCKS["asset_id"]: SPACEBUCKS,
MARMOT["asset_id"]: MARMOT,
DUCK_SAUCE["asset_id"]: DUCK_SAUCE,
STABLY_USDS["asset_id"]: STABLY_USDS,
CHIA_HOLIDAY_TOKEN["asset_id"]: CHIA_HOLIDAY_TOKEN,
}

View File

@ -338,7 +338,7 @@ class WalletNode:
self.server.on_connect = self.on_connect
network_name = self.config["selected_network"]
connect_to_unknown_peers = self.config.get("connect_to_unknown_peers", False)
connect_to_unknown_peers = self.config.get("connect_to_unknown_peers", True)
if connect_to_unknown_peers:
self.wallet_peers = WalletPeers(
self.server,

View File

@ -29,7 +29,7 @@ dependencies = [
"click==7.1.2", # For the CLI
"dnspythonchia==2.2.0", # Query DNS seeds
"watchdog==2.1.6", # Filesystem event watching - watches keyring.yaml
"dnslib==0.9.14", # dns lib
"dnslib==0.9.17", # dns lib
"typing-extensions==4.0.1", # typing backports like Protocol and TypedDict
"zstd==1.5.0.4",
"packaging==21.0",

View File

@ -163,6 +163,7 @@ class BlockTools:
mkdir(self.plot_dir)
mkdir(self.temp_dir)
self.expected_plots: Dict[bytes32, Path] = {}
self.created_plots: int = 0
self.total_result = PlotRefreshResult()
def test_callback(event: PlotRefreshEvents, update_result: PlotRefreshResult):
@ -230,7 +231,7 @@ class BlockTools:
save_config(self.root_path, "config.yaml", self._config)
async def setup_plots(self):
assert len(self.expected_plots) == 0
assert self.created_plots == 0
# OG Plots
for i in range(15):
await self.new_plot()
@ -292,8 +293,9 @@ class BlockTools:
plot_keys,
self.root_path,
use_datetime=False,
test_private_keys=[AugSchemeMPL.key_gen(std_hash(len(self.expected_plots).to_bytes(2, "big")))],
test_private_keys=[AugSchemeMPL.key_gen(std_hash(self.created_plots.to_bytes(2, "big")))],
)
self.created_plots += 1
plot_id_new: Optional[bytes32] = None
path_new: Path = Path()

View File

@ -186,6 +186,38 @@ class TestCoinStoreWithBlocks:
assert record.spent
assert record.spent_block_index == block.height
@pytest.mark.asyncio
async def test_num_unspent(self, db_version):
blocks = bt.get_consecutive_blocks(37, [])
expect_unspent = 0
test_excercised = False
async with DBConnection(db_version) as db_wrapper:
coin_store = await CoinStore.create(db_wrapper)
for block in blocks:
if not block.is_transaction_block():
continue
if block.is_transaction_block():
assert block.foliage_transaction_block is not None
removals: List[bytes32] = []
additions: List[Coin] = []
await coin_store.new_block(
block.height,
block.foliage_transaction_block.timestamp,
block.get_included_reward_coins(),
additions,
removals,
)
expect_unspent += len(block.get_included_reward_coins())
assert await coin_store.num_unspent() == expect_unspent
test_excercised = expect_unspent > 0
assert test_excercised
@pytest.mark.asyncio
@pytest.mark.parametrize("cache_size", [0, 10, 100000])
async def test_rollback(self, cache_size: uint32, db_version):

View File

@ -469,9 +469,14 @@ async def test_plot_info_caching(test_environment):
refresh_tester: PlotRefreshTester = PlotRefreshTester(env.root_path)
plot_manager = refresh_tester.plot_manager
plot_manager.cache.load()
assert len(plot_manager.cache) == len(plot_manager.cache)
assert len(plot_manager.cache) == len(env.refresh_tester.plot_manager.cache)
for plot_id, cache_entry in env.refresh_tester.plot_manager.cache.items():
cache_entry_new = plot_manager.cache.get(plot_id)
assert cache_entry_new.pool_public_key == cache_entry.pool_public_key
assert cache_entry_new.pool_contract_puzzle_hash == cache_entry.pool_contract_puzzle_hash
assert cache_entry_new.plot_public_key == cache_entry.plot_public_key
await refresh_tester.run(expected_result)
for path, plot_info in plot_manager.plots.items():
for path, plot_info in env.refresh_tester.plot_manager.plots.items():
assert path in plot_manager.plots
assert plot_manager.plots[path].prover.get_filename() == plot_info.prover.get_filename()
assert plot_manager.plots[path].prover.get_id() == plot_info.prover.get_id()
@ -482,9 +487,9 @@ async def test_plot_info_caching(test_environment):
assert plot_manager.plots[path].plot_public_key == plot_info.plot_public_key
assert plot_manager.plots[path].file_size == plot_info.file_size
assert plot_manager.plots[path].time_modified == plot_info.time_modified
assert plot_manager.plot_filename_paths == plot_manager.plot_filename_paths
assert plot_manager.failed_to_open_filenames == plot_manager.failed_to_open_filenames
assert plot_manager.no_key_filenames == plot_manager.no_key_filenames
assert plot_manager.plot_filename_paths == env.refresh_tester.plot_manager.plot_filename_paths
assert plot_manager.failed_to_open_filenames == env.refresh_tester.plot_manager.failed_to_open_filenames
assert plot_manager.no_key_filenames == env.refresh_tester.plot_manager.no_key_filenames
plot_manager.stop_refreshing()
# Modify the content of the plot_manager.dat
with open(plot_manager.cache.path(), "r+b") as file:

1
tests/pools/config.py Normal file
View File

@ -0,0 +1 @@
job_timeout = 45

View File

@ -26,7 +26,7 @@ async def create_blockchain(constants: ConsensusConstants, db_version: int):
db_path.unlink()
blockchain_db_counter += 1
connection = await aiosqlite.connect(db_path)
wrapper = DBWrapper(connection, False, db_version)
wrapper = DBWrapper(connection, db_version)
coin_store = await CoinStore.create(wrapper)
store = await BlockStore.create(wrapper)
hint_store = await HintStore.create(wrapper)

View File

@ -13,7 +13,7 @@ class DBConnection:
if self.db_path.exists():
self.db_path.unlink()
self.connection = await aiosqlite.connect(self.db_path)
return DBWrapper(self.connection, False, self.db_version)
return DBWrapper(self.connection, self.db_version)
async def __aexit__(self, exc_t, exc_v, exc_tb):
await self.connection.close()