Add configuration locking (#10680)

* Add configuration locking

Extracted from https://github.com/Chia-Network/chia-blockchain/pull/10631

* note that fasteners will likely be replaced by filelock

* Fix test_multiple_writers on macOS

* create_all_ssl() doesn't need to be inside the config access lock

* add warnings about not using async within get_config_lock() get lock contexts

* no need to pre-touch the lock file

* .yaml.lock instead of just .lock

* test_multiple_writers() is sync

* Revert "add warnings about not using async within get_config_lock() get lock contexts"

This reverts commit 681af3835b.

* reduce lock context size in chia_init()

* use an exit stack in load_config()

* avoid config existence precheck

* only lock around the read in load_config()

* do not raise e, just raise

* tidy new imports

* fix queue empty check in test_config.py

* remove commented out code in test_config.py

* remove unused import

Co-authored-by: Jeff Cruikshank <jeff@chia.net>
This commit is contained in:
Kyle Altendorf 2022-03-12 09:52:47 -05:00 committed by GitHub
parent 19ee652d73
commit dbeff36ae0
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
11 changed files with 492 additions and 348 deletions

View File

@ -57,7 +57,7 @@ repos:
rev: v0.930
hooks:
- id: mypy
additional_dependencies: [pytest, pytest-asyncio, types-aiofiles, types-click, types-setuptools, types-PyYAML]
additional_dependencies: [filelock, pytest, pytest-asyncio, types-aiofiles, types-click, types-setuptools, types-PyYAML]
# This intentionally counters the settings in mypy.ini to allow a loose local
# check and a strict CI check. This difference may or may not be retained long
# term.

View File

@ -3,7 +3,7 @@ from typing import Dict, Optional
import click
from chia.util.config import load_config, save_config, str2bool
from chia.util.config import get_config_lock, load_config, save_config, str2bool
from chia.util.default_root import DEFAULT_ROOT_PATH
@ -24,172 +24,173 @@ def configure(
seeder_domain_name: str,
seeder_nameserver: str,
):
config: Dict = load_config(DEFAULT_ROOT_PATH, "config.yaml")
change_made = False
if set_node_introducer:
try:
if set_node_introducer.index(":"):
host, port = (
":".join(set_node_introducer.split(":")[:-1]),
set_node_introducer.split(":")[-1],
)
config["full_node"]["introducer_peer"]["host"] = host
config["full_node"]["introducer_peer"]["port"] = int(port)
config["introducer"]["port"] = int(port)
print("Node introducer updated")
with get_config_lock(root_path, "config.yaml"):
config: Dict = load_config(DEFAULT_ROOT_PATH, "config.yaml", acquire_lock=False)
change_made = False
if set_node_introducer:
try:
if set_node_introducer.index(":"):
host, port = (
":".join(set_node_introducer.split(":")[:-1]),
set_node_introducer.split(":")[-1],
)
config["full_node"]["introducer_peer"]["host"] = host
config["full_node"]["introducer_peer"]["port"] = int(port)
config["introducer"]["port"] = int(port)
print("Node introducer updated")
change_made = True
except ValueError:
print("Node introducer address must be in format [IP:Port]")
if set_farmer_peer:
try:
if set_farmer_peer.index(":"):
host, port = (
":".join(set_farmer_peer.split(":")[:-1]),
set_farmer_peer.split(":")[-1],
)
config["full_node"]["farmer_peer"]["host"] = host
config["full_node"]["farmer_peer"]["port"] = int(port)
config["harvester"]["farmer_peer"]["host"] = host
config["harvester"]["farmer_peer"]["port"] = int(port)
print("Farmer peer updated, make sure your harvester has the proper cert installed")
change_made = True
except ValueError:
print("Farmer address must be in format [IP:Port]")
if set_fullnode_port:
config["full_node"]["port"] = int(set_fullnode_port)
config["full_node"]["introducer_peer"]["port"] = int(set_fullnode_port)
config["farmer"]["full_node_peer"]["port"] = int(set_fullnode_port)
config["timelord"]["full_node_peer"]["port"] = int(set_fullnode_port)
config["wallet"]["full_node_peer"]["port"] = int(set_fullnode_port)
config["wallet"]["introducer_peer"]["port"] = int(set_fullnode_port)
config["introducer"]["port"] = int(set_fullnode_port)
print("Default full node port updated")
change_made = True
if set_harvester_port:
config["harvester"]["port"] = int(set_harvester_port)
config["farmer"]["harvester_peer"]["port"] = int(set_harvester_port)
print("Default harvester port updated")
change_made = True
if set_log_level:
levels = ["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG", "NOTSET"]
if set_log_level in levels:
config["logging"]["log_level"] = set_log_level
print(f"Logging level updated. Check {DEFAULT_ROOT_PATH}/log/debug.log")
change_made = True
except ValueError:
print("Node introducer address must be in format [IP:Port]")
if set_farmer_peer:
try:
if set_farmer_peer.index(":"):
host, port = (
":".join(set_farmer_peer.split(":")[:-1]),
set_farmer_peer.split(":")[-1],
)
config["full_node"]["farmer_peer"]["host"] = host
config["full_node"]["farmer_peer"]["port"] = int(port)
config["harvester"]["farmer_peer"]["host"] = host
config["harvester"]["farmer_peer"]["port"] = int(port)
print("Farmer peer updated, make sure your harvester has the proper cert installed")
else:
print(f"Logging level not updated. Use one of: {levels}")
if enable_upnp:
config["full_node"]["enable_upnp"] = str2bool(enable_upnp)
if str2bool(enable_upnp):
print("uPnP enabled")
else:
print("uPnP disabled")
change_made = True
if set_outbound_peer_count:
config["full_node"]["target_outbound_peer_count"] = int(set_outbound_peer_count)
print("Target outbound peer count updated")
change_made = True
if set_peer_count:
config["full_node"]["target_peer_count"] = int(set_peer_count)
print("Target peer count updated")
change_made = True
if testnet:
if testnet == "true" or testnet == "t":
print("Setting Testnet")
testnet_port = "58444"
testnet_introducer = "introducer-testnet10.chia.net"
testnet_dns_introducer = "dns-introducer-testnet10.chia.net"
bootstrap_peers = ["testnet10-node.chia.net"]
testnet = "testnet10"
config["full_node"]["port"] = int(testnet_port)
config["full_node"]["introducer_peer"]["port"] = int(testnet_port)
config["farmer"]["full_node_peer"]["port"] = int(testnet_port)
config["timelord"]["full_node_peer"]["port"] = int(testnet_port)
config["wallet"]["full_node_peer"]["port"] = int(testnet_port)
config["wallet"]["introducer_peer"]["port"] = int(testnet_port)
config["introducer"]["port"] = int(testnet_port)
config["full_node"]["introducer_peer"]["host"] = testnet_introducer
config["full_node"]["dns_servers"] = [testnet_dns_introducer]
config["wallet"]["dns_servers"] = [testnet_dns_introducer]
config["selected_network"] = testnet
config["harvester"]["selected_network"] = testnet
config["pool"]["selected_network"] = testnet
config["farmer"]["selected_network"] = testnet
config["timelord"]["selected_network"] = testnet
config["full_node"]["selected_network"] = testnet
config["ui"]["selected_network"] = testnet
config["introducer"]["selected_network"] = testnet
config["wallet"]["selected_network"] = testnet
if "seeder" in config:
config["seeder"]["port"] = int(testnet_port)
config["seeder"]["other_peers_port"] = int(testnet_port)
config["seeder"]["selected_network"] = testnet
config["seeder"]["bootstrap_peers"] = bootstrap_peers
print("Default full node port, introducer and network setting updated")
change_made = True
except ValueError:
print("Farmer address must be in format [IP:Port]")
if set_fullnode_port:
config["full_node"]["port"] = int(set_fullnode_port)
config["full_node"]["introducer_peer"]["port"] = int(set_fullnode_port)
config["farmer"]["full_node_peer"]["port"] = int(set_fullnode_port)
config["timelord"]["full_node_peer"]["port"] = int(set_fullnode_port)
config["wallet"]["full_node_peer"]["port"] = int(set_fullnode_port)
config["wallet"]["introducer_peer"]["port"] = int(set_fullnode_port)
config["introducer"]["port"] = int(set_fullnode_port)
print("Default full node port updated")
change_made = True
if set_harvester_port:
config["harvester"]["port"] = int(set_harvester_port)
config["farmer"]["harvester_peer"]["port"] = int(set_harvester_port)
print("Default harvester port updated")
change_made = True
if set_log_level:
levels = ["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG", "NOTSET"]
if set_log_level in levels:
config["logging"]["log_level"] = set_log_level
print(f"Logging level updated. Check {DEFAULT_ROOT_PATH}/log/debug.log")
change_made = True
else:
print(f"Logging level not updated. Use one of: {levels}")
if enable_upnp:
config["full_node"]["enable_upnp"] = str2bool(enable_upnp)
if str2bool(enable_upnp):
print("uPnP enabled")
else:
print("uPnP disabled")
change_made = True
if set_outbound_peer_count:
config["full_node"]["target_outbound_peer_count"] = int(set_outbound_peer_count)
print("Target outbound peer count updated")
change_made = True
if set_peer_count:
config["full_node"]["target_peer_count"] = int(set_peer_count)
print("Target peer count updated")
change_made = True
if testnet:
if testnet == "true" or testnet == "t":
print("Setting Testnet")
testnet_port = "58444"
testnet_introducer = "introducer-testnet10.chia.net"
testnet_dns_introducer = "dns-introducer-testnet10.chia.net"
bootstrap_peers = ["testnet10-node.chia.net"]
testnet = "testnet10"
config["full_node"]["port"] = int(testnet_port)
config["full_node"]["introducer_peer"]["port"] = int(testnet_port)
config["farmer"]["full_node_peer"]["port"] = int(testnet_port)
config["timelord"]["full_node_peer"]["port"] = int(testnet_port)
config["wallet"]["full_node_peer"]["port"] = int(testnet_port)
config["wallet"]["introducer_peer"]["port"] = int(testnet_port)
config["introducer"]["port"] = int(testnet_port)
config["full_node"]["introducer_peer"]["host"] = testnet_introducer
config["full_node"]["dns_servers"] = [testnet_dns_introducer]
config["wallet"]["dns_servers"] = [testnet_dns_introducer]
config["selected_network"] = testnet
config["harvester"]["selected_network"] = testnet
config["pool"]["selected_network"] = testnet
config["farmer"]["selected_network"] = testnet
config["timelord"]["selected_network"] = testnet
config["full_node"]["selected_network"] = testnet
config["ui"]["selected_network"] = testnet
config["introducer"]["selected_network"] = testnet
config["wallet"]["selected_network"] = testnet
if "seeder" in config:
config["seeder"]["port"] = int(testnet_port)
config["seeder"]["other_peers_port"] = int(testnet_port)
config["seeder"]["selected_network"] = testnet
config["seeder"]["bootstrap_peers"] = bootstrap_peers
elif testnet == "false" or testnet == "f":
print("Setting Mainnet")
mainnet_port = "8444"
mainnet_introducer = "introducer.chia.net"
mainnet_dns_introducer = "dns-introducer.chia.net"
bootstrap_peers = ["node.chia.net"]
net = "mainnet"
config["full_node"]["port"] = int(mainnet_port)
config["full_node"]["introducer_peer"]["port"] = int(mainnet_port)
config["farmer"]["full_node_peer"]["port"] = int(mainnet_port)
config["timelord"]["full_node_peer"]["port"] = int(mainnet_port)
config["wallet"]["full_node_peer"]["port"] = int(mainnet_port)
config["wallet"]["introducer_peer"]["port"] = int(mainnet_port)
config["introducer"]["port"] = int(mainnet_port)
config["full_node"]["introducer_peer"]["host"] = mainnet_introducer
config["full_node"]["dns_servers"] = [mainnet_dns_introducer]
config["selected_network"] = net
config["harvester"]["selected_network"] = net
config["pool"]["selected_network"] = net
config["farmer"]["selected_network"] = net
config["timelord"]["selected_network"] = net
config["full_node"]["selected_network"] = net
config["ui"]["selected_network"] = net
config["introducer"]["selected_network"] = net
config["wallet"]["selected_network"] = net
print("Default full node port, introducer and network setting updated")
if "seeder" in config:
config["seeder"]["port"] = int(mainnet_port)
config["seeder"]["other_peers_port"] = int(mainnet_port)
config["seeder"]["selected_network"] = net
config["seeder"]["bootstrap_peers"] = bootstrap_peers
print("Default full node port, introducer and network setting updated")
change_made = True
else:
print("Please choose True or False")
if peer_connect_timeout:
config["full_node"]["peer_connect_timeout"] = int(peer_connect_timeout)
change_made = True
elif testnet == "false" or testnet == "f":
print("Setting Mainnet")
mainnet_port = "8444"
mainnet_introducer = "introducer.chia.net"
mainnet_dns_introducer = "dns-introducer.chia.net"
bootstrap_peers = ["node.chia.net"]
net = "mainnet"
config["full_node"]["port"] = int(mainnet_port)
config["full_node"]["introducer_peer"]["port"] = int(mainnet_port)
config["farmer"]["full_node_peer"]["port"] = int(mainnet_port)
config["timelord"]["full_node_peer"]["port"] = int(mainnet_port)
config["wallet"]["full_node_peer"]["port"] = int(mainnet_port)
config["wallet"]["introducer_peer"]["port"] = int(mainnet_port)
config["introducer"]["port"] = int(mainnet_port)
config["full_node"]["introducer_peer"]["host"] = mainnet_introducer
config["full_node"]["dns_servers"] = [mainnet_dns_introducer]
config["selected_network"] = net
config["harvester"]["selected_network"] = net
config["pool"]["selected_network"] = net
config["farmer"]["selected_network"] = net
config["timelord"]["selected_network"] = net
config["full_node"]["selected_network"] = net
config["ui"]["selected_network"] = net
config["introducer"]["selected_network"] = net
config["wallet"]["selected_network"] = net
if "seeder" in config:
config["seeder"]["port"] = int(mainnet_port)
config["seeder"]["other_peers_port"] = int(mainnet_port)
config["seeder"]["selected_network"] = net
config["seeder"]["bootstrap_peers"] = bootstrap_peers
print("Default full node port, introducer and network setting updated")
if crawler_db_path is not None and "seeder" in config:
config["seeder"]["crawler_db_path"] = crawler_db_path
change_made = True
else:
print("Please choose True or False")
if peer_connect_timeout:
config["full_node"]["peer_connect_timeout"] = int(peer_connect_timeout)
change_made = True
if crawler_minimum_version_count is not None and "seeder" in config:
config["seeder"]["minimum_version_count"] = crawler_minimum_version_count
change_made = True
if crawler_db_path is not None and "seeder" in config:
config["seeder"]["crawler_db_path"] = crawler_db_path
change_made = True
if seeder_domain_name is not None and "seeder" in config:
config["seeder"]["domain_name"] = seeder_domain_name
change_made = True
if crawler_minimum_version_count is not None and "seeder" in config:
config["seeder"]["minimum_version_count"] = crawler_minimum_version_count
change_made = True
if seeder_nameserver is not None and "seeder" in config:
config["seeder"]["nameserver"] = seeder_nameserver
change_made = True
if seeder_domain_name is not None and "seeder" in config:
config["seeder"]["domain_name"] = seeder_domain_name
change_made = True
if seeder_nameserver is not None and "seeder" in config:
config["seeder"]["nameserver"] = seeder_nameserver
change_made = True
if change_made:
print("Restart any running chia services for changes to take effect")
save_config(root_path, "config.yaml", config)
if change_made:
print("Restart any running chia services for changes to take effect")
save_config(root_path, "config.yaml", config)
@click.command("configure", short_help="Modify configuration")

View File

@ -3,7 +3,7 @@ from pathlib import Path
import sys
from time import time
from chia.util.config import load_config, save_config
from chia.util.config import load_config, save_config, get_config_lock
from chia.util.path import mkdir, path_from_root
from chia.util.ints import uint32
from chia.types.blockchain_format.sized_bytes import bytes32
@ -44,11 +44,12 @@ def db_upgrade_func(
if update_config:
print("updating config.yaml")
config = load_config(root_path, "config.yaml")
new_db_path = db_pattern.replace("_v1_", "_v2_")
config["full_node"]["database_path"] = new_db_path
print(f"database_path: {new_db_path}")
save_config(root_path, "config.yaml", config)
with get_config_lock(root_path, "config.yaml"):
config = load_config(root_path, "config.yaml", acquire_lock=False)
new_db_path = db_pattern.replace("_v1_", "_v2_")
config["full_node"]["database_path"] = new_db_path
print(f"database_path: {new_db_path}")
save_config(root_path, "config.yaml", config)
print(f"\n\nLEAVING PREVIOUS DB FILE UNTOUCHED {in_db_path}\n")

View File

@ -21,6 +21,7 @@ from chia.util.config import (
load_config,
save_config,
unflatten_properties,
get_config_lock,
)
from chia.util.keychain import Keychain
from chia.util.path import mkdir, path_from_root
@ -74,88 +75,90 @@ def check_keys(new_root: Path, keychain: Optional[Keychain] = None) -> None:
print("No keys are present in the keychain. Generate them with 'chia keys generate'")
return None
config: Dict = load_config(new_root, "config.yaml")
pool_child_pubkeys = [master_sk_to_pool_sk(sk).get_g1() for sk, _ in all_sks]
all_targets = []
stop_searching_for_farmer = "xch_target_address" not in config["farmer"]
stop_searching_for_pool = "xch_target_address" not in config["pool"]
number_of_ph_to_search = 50
selected = config["selected_network"]
prefix = config["network_overrides"]["config"][selected]["address_prefix"]
with get_config_lock(new_root, "config.yaml"):
config: Dict = load_config(new_root, "config.yaml", acquire_lock=False)
pool_child_pubkeys = [master_sk_to_pool_sk(sk).get_g1() for sk, _ in all_sks]
all_targets = []
stop_searching_for_farmer = "xch_target_address" not in config["farmer"]
stop_searching_for_pool = "xch_target_address" not in config["pool"]
number_of_ph_to_search = 50
selected = config["selected_network"]
prefix = config["network_overrides"]["config"][selected]["address_prefix"]
intermediates = {}
for sk, _ in all_sks:
intermediates[bytes(sk)] = {
"observer": master_sk_to_wallet_sk_unhardened_intermediate(sk),
"non-observer": master_sk_to_wallet_sk_intermediate(sk),
}
for i in range(number_of_ph_to_search):
if stop_searching_for_farmer and stop_searching_for_pool and i > 0:
break
intermediates = {}
for sk, _ in all_sks:
intermediate_n = intermediates[bytes(sk)]["non-observer"]
intermediate_o = intermediates[bytes(sk)]["observer"]
intermediates[bytes(sk)] = {
"observer": master_sk_to_wallet_sk_unhardened_intermediate(sk),
"non-observer": master_sk_to_wallet_sk_intermediate(sk),
}
all_targets.append(
encode_puzzle_hash(
create_puzzlehash_for_pk(_derive_path_unhardened(intermediate_o, [i]).get_g1()), prefix
for i in range(number_of_ph_to_search):
if stop_searching_for_farmer and stop_searching_for_pool and i > 0:
break
for sk, _ in all_sks:
intermediate_n = intermediates[bytes(sk)]["non-observer"]
intermediate_o = intermediates[bytes(sk)]["observer"]
all_targets.append(
encode_puzzle_hash(
create_puzzlehash_for_pk(_derive_path_unhardened(intermediate_o, [i]).get_g1()), prefix
)
)
all_targets.append(
encode_puzzle_hash(create_puzzlehash_for_pk(_derive_path(intermediate_n, [i]).get_g1()), prefix)
)
if all_targets[-1] == config["farmer"].get("xch_target_address") or all_targets[-2] == config[
"farmer"
].get("xch_target_address"):
stop_searching_for_farmer = True
if all_targets[-1] == config["pool"].get("xch_target_address") or all_targets[-2] == config["pool"].get(
"xch_target_address"
):
stop_searching_for_pool = True
# Set the destinations, if necessary
updated_target: bool = False
if "xch_target_address" not in config["farmer"]:
print(
f"Setting the xch destination for the farmer reward (1/8 plus fees, solo and pooling)"
f" to {all_targets[0]}"
)
all_targets.append(
encode_puzzle_hash(create_puzzlehash_for_pk(_derive_path(intermediate_n, [i]).get_g1()), prefix)
config["farmer"]["xch_target_address"] = all_targets[0]
updated_target = True
elif config["farmer"]["xch_target_address"] not in all_targets:
print(
f"WARNING: using a farmer address which we might not have the private"
f" keys for. We searched the first {number_of_ph_to_search} addresses. Consider overriding "
f"{config['farmer']['xch_target_address']} with {all_targets[0]}"
)
if all_targets[-1] == config["farmer"].get("xch_target_address") or all_targets[-2] == config["farmer"].get(
"xch_target_address"
):
stop_searching_for_farmer = True
if all_targets[-1] == config["pool"].get("xch_target_address") or all_targets[-2] == config["pool"].get(
"xch_target_address"
):
stop_searching_for_pool = True
# Set the destinations, if necessary
updated_target: bool = False
if "xch_target_address" not in config["farmer"]:
print(
f"Setting the xch destination for the farmer reward (1/8 plus fees, solo and pooling) to {all_targets[0]}"
)
config["farmer"]["xch_target_address"] = all_targets[0]
updated_target = True
elif config["farmer"]["xch_target_address"] not in all_targets:
print(
f"WARNING: using a farmer address which we might not have the private"
f" keys for. We searched the first {number_of_ph_to_search} addresses. Consider overriding "
f"{config['farmer']['xch_target_address']} with {all_targets[0]}"
)
if "pool" not in config:
config["pool"] = {}
if "xch_target_address" not in config["pool"]:
print(f"Setting the xch destination address for pool reward (7/8 for solo only) to {all_targets[0]}")
config["pool"]["xch_target_address"] = all_targets[0]
updated_target = True
elif config["pool"]["xch_target_address"] not in all_targets:
print(
f"WARNING: using a pool address which we might not have the private"
f" keys for. We searched the first {number_of_ph_to_search} addresses. Consider overriding "
f"{config['pool']['xch_target_address']} with {all_targets[0]}"
)
if updated_target:
print(
f"To change the XCH destination addresses, edit the `xch_target_address` entries in"
f" {(new_root / 'config' / 'config.yaml').absolute()}."
)
if "pool" not in config:
config["pool"] = {}
if "xch_target_address" not in config["pool"]:
print(f"Setting the xch destination address for pool reward (7/8 for solo only) to {all_targets[0]}")
config["pool"]["xch_target_address"] = all_targets[0]
updated_target = True
elif config["pool"]["xch_target_address"] not in all_targets:
print(
f"WARNING: using a pool address which we might not have the private"
f" keys for. We searched the first {number_of_ph_to_search} addresses. Consider overriding "
f"{config['pool']['xch_target_address']} with {all_targets[0]}"
)
if updated_target:
print(
f"To change the XCH destination addresses, edit the `xch_target_address` entries in"
f" {(new_root / 'config' / 'config.yaml').absolute()}."
)
# Set the pool pks in the farmer
pool_pubkeys_hex = set(bytes(pk).hex() for pk in pool_child_pubkeys)
if "pool_public_keys" in config["farmer"]:
for pk_hex in config["farmer"]["pool_public_keys"]:
# Add original ones in config
pool_pubkeys_hex.add(pk_hex)
# Set the pool pks in the farmer
pool_pubkeys_hex = set(bytes(pk).hex() for pk in pool_child_pubkeys)
if "pool_public_keys" in config["farmer"]:
for pk_hex in config["farmer"]["pool_public_keys"]:
# Add original ones in config
pool_pubkeys_hex.add(pk_hex)
config["farmer"]["pool_public_keys"] = pool_pubkeys_hex
save_config(new_root, "config.yaml", config)
config["farmer"]["pool_public_keys"] = pool_pubkeys_hex
save_config(new_root, "config.yaml", config)
def copy_files_rec(old_path: Path, new_path: Path):
@ -193,13 +196,15 @@ def migrate_from(
copy_files_rec(old_path, new_path)
# update config yaml with new keys
config: Dict = load_config(new_root, "config.yaml")
config_str: str = initial_config_file("config.yaml")
default_config: Dict = yaml.safe_load(config_str)
flattened_keys = unflatten_properties({k: "" for k in do_not_migrate_settings})
dict_add_new_default(config, default_config, flattened_keys)
save_config(new_root, "config.yaml", config)
with get_config_lock(new_root, "config.yaml"):
config: Dict = load_config(new_root, "config.yaml", acquire_lock=False)
config_str: str = initial_config_file("config.yaml")
default_config: Dict = yaml.safe_load(config_str)
flattened_keys = unflatten_properties({k: "" for k in do_not_migrate_settings})
dict_add_new_default(config, default_config, flattened_keys)
save_config(new_root, "config.yaml", config)
create_all_ssl(new_root)
@ -457,12 +462,14 @@ def chia_init(
check_keys(root_path)
config: Dict
if v1_db:
config = load_config(root_path, "config.yaml")
db_pattern = config["full_node"]["database_path"]
new_db_path = db_pattern.replace("_v2_", "_v1_")
config["full_node"]["database_path"] = new_db_path
save_config(root_path, "config.yaml", config)
with get_config_lock(root_path, "config.yaml"):
config = load_config(root_path, "config.yaml", acquire_lock=False)
db_pattern = config["full_node"]["database_path"]
new_db_path = db_pattern.replace("_v2_", "_v1_")
config["full_node"]["database_path"] = new_db_path
save_config(root_path, "config.yaml", config)
else:
config = load_config(root_path, "config.yaml")["full_node"]
db_path_replaced: str = config["database_path"].replace("CHALLENGE", config["selected_network"])

View File

@ -40,7 +40,7 @@ from chia.types.blockchain_format.proof_of_space import ProofOfSpace
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.bech32m import decode_puzzle_hash
from chia.util.byte_types import hexstr_to_bytes
from chia.util.config import load_config, save_config, config_path_for_filename
from chia.util.config import load_config, save_config, config_path_for_filename, get_config_lock
from chia.util.hash import std_hash
from chia.util.ints import uint8, uint16, uint32, uint64
from chia.util.keychain import Keychain
@ -581,34 +581,36 @@ class Farmer:
}
def set_reward_targets(self, farmer_target_encoded: Optional[str], pool_target_encoded: Optional[str]):
config = load_config(self._root_path, "config.yaml")
if farmer_target_encoded is not None:
self.farmer_target_encoded = farmer_target_encoded
self.farmer_target = decode_puzzle_hash(farmer_target_encoded)
config["farmer"]["xch_target_address"] = farmer_target_encoded
if pool_target_encoded is not None:
self.pool_target_encoded = pool_target_encoded
self.pool_target = decode_puzzle_hash(pool_target_encoded)
config["pool"]["xch_target_address"] = pool_target_encoded
save_config(self._root_path, "config.yaml", config)
with get_config_lock(self._root_path, "config.yaml"):
config = load_config(self._root_path, "config.yaml", acquire_lock=False)
if farmer_target_encoded is not None:
self.farmer_target_encoded = farmer_target_encoded
self.farmer_target = decode_puzzle_hash(farmer_target_encoded)
config["farmer"]["xch_target_address"] = farmer_target_encoded
if pool_target_encoded is not None:
self.pool_target_encoded = pool_target_encoded
self.pool_target = decode_puzzle_hash(pool_target_encoded)
config["pool"]["xch_target_address"] = pool_target_encoded
save_config(self._root_path, "config.yaml", config)
async def set_payout_instructions(self, launcher_id: bytes32, payout_instructions: str):
for p2_singleton_puzzle_hash, pool_state_dict in self.pool_state.items():
if launcher_id == pool_state_dict["pool_config"].launcher_id:
config = load_config(self._root_path, "config.yaml")
new_list = []
pool_list = config["pool"].get("pool_list", [])
if pool_list is not None:
for list_element in pool_list:
if hexstr_to_bytes(list_element["launcher_id"]) == bytes(launcher_id):
list_element["payout_instructions"] = payout_instructions
new_list.append(list_element)
with get_config_lock(self._root_path, "config.yaml"):
config = load_config(self._root_path, "config.yaml", acquire_lock=False)
new_list = []
pool_list = config["pool"].get("pool_list", [])
if pool_list is not None:
for list_element in pool_list:
if hexstr_to_bytes(list_element["launcher_id"]) == bytes(launcher_id):
list_element["payout_instructions"] = payout_instructions
new_list.append(list_element)
config["pool"]["pool_list"] = new_list
save_config(self._root_path, "config.yaml", config)
# Force a GET /farmer which triggers the PUT /farmer if it detects the changed instructions
pool_state_dict["next_farmer_update"] = 0
return
config["pool"]["pool_list"] = new_list
save_config(self._root_path, "config.yaml", config)
# Force a GET /farmer which triggers the PUT /farmer if it detects the changed instructions
pool_state_dict["next_farmer_update"] = 0
return
self.log.warning(f"Launcher id: {launcher_id} not found")

View File

@ -9,7 +9,7 @@ from blspy import G1Element, PrivateKey
from chiapos import DiskProver
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.config import load_config, save_config
from chia.util.config import load_config, save_config, get_config_lock
log = logging.getLogger(__name__)
@ -79,28 +79,30 @@ def get_plot_filenames(root_path: Path) -> Dict[Path, List[Path]]:
def add_plot_directory(root_path: Path, str_path: str) -> Dict:
log.debug(f"add_plot_directory {str_path}")
config = load_config(root_path, "config.yaml")
if str(Path(str_path).resolve()) not in get_plot_directories(root_path, config):
config["harvester"]["plot_directories"].append(str(Path(str_path).resolve()))
save_config(root_path, "config.yaml", config)
return config
with get_config_lock(root_path, "config.yaml"):
config = load_config(root_path, "config.yaml", acquire_lock=False)
if str(Path(str_path).resolve()) not in get_plot_directories(root_path, config):
config["harvester"]["plot_directories"].append(str(Path(str_path).resolve()))
save_config(root_path, "config.yaml", config)
return config
def remove_plot_directory(root_path: Path, str_path: str) -> None:
log.debug(f"remove_plot_directory {str_path}")
config = load_config(root_path, "config.yaml")
str_paths: List[str] = get_plot_directories(root_path, config)
# If path str matches exactly, remove
if str_path in str_paths:
str_paths.remove(str_path)
with get_config_lock(root_path, "config.yaml"):
config = load_config(root_path, "config.yaml", acquire_lock=False)
str_paths: List[str] = get_plot_directories(root_path, config)
# If path str matches exactly, remove
if str_path in str_paths:
str_paths.remove(str_path)
# If path matches full path, remove
new_paths = [Path(sp).resolve() for sp in str_paths]
if Path(str_path).resolve() in new_paths:
new_paths.remove(Path(str_path).resolve())
# If path matches full path, remove
new_paths = [Path(sp).resolve() for sp in str_paths]
if Path(str_path).resolve() in new_paths:
new_paths.remove(Path(str_path).resolve())
config["harvester"]["plot_directories"] = [str(np) for np in new_paths]
save_config(root_path, "config.yaml", config)
config["harvester"]["plot_directories"] = [str(np) for np in new_paths]
save_config(root_path, "config.yaml", config)
def remove_plot(path: Path):

View File

@ -7,7 +7,7 @@ from blspy import G1Element
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.byte_types import hexstr_to_bytes
from chia.util.config import load_config, save_config
from chia.util.config import get_config_lock, load_config, save_config
from chia.util.streamable import Streamable, streamable
"""
@ -61,28 +61,28 @@ def load_pool_config(root_path: Path) -> List[PoolWalletConfig]:
# TODO: remove this a few versions after 1.3, since authentication_public_key is deprecated. This is here to support
# downgrading to versions older than 1.3.
def add_auth_key(root_path: Path, config_entry: PoolWalletConfig, auth_key: G1Element):
config = load_config(root_path, "config.yaml")
pool_list = config["pool"].get("pool_list", [])
updated = False
if pool_list is not None:
for pool_config_dict in pool_list:
try:
if (
G1Element.from_bytes(hexstr_to_bytes(pool_config_dict["owner_public_key"]))
== config_entry.owner_public_key
):
auth_key_hex = bytes(auth_key).hex()
if pool_config_dict.get("authentication_public_key", "") != auth_key_hex:
pool_config_dict["authentication_public_key"] = auth_key_hex
updated = True
except Exception as e:
log.error(f"Exception updating config: {pool_config_dict} {e}")
if updated:
config["pool"]["pool_list"] = pool_list
save_config(root_path, "config.yaml", config)
with get_config_lock(root_path, "config.yaml"):
config = load_config(root_path, "config.yaml", acquire_lock=False)
pool_list = config["pool"].get("pool_list", [])
updated = False
if pool_list is not None:
for pool_config_dict in pool_list:
try:
if hexstr_to_bytes(pool_config_dict["owner_public_key"]) == bytes(config_entry.owner_public_key):
auth_key_hex = bytes(auth_key).hex()
if pool_config_dict.get("authentication_public_key", "") != auth_key_hex:
pool_config_dict["authentication_public_key"] = auth_key_hex
updated = True
except Exception as e:
log.error(f"Exception updating config: {pool_config_dict} {e}")
if updated:
log.info(f"Updating pool config for auth key: {auth_key}")
config["pool"]["pool_list"] = pool_list
save_config(root_path, "config.yaml", config)
async def update_pool_config(root_path: Path, pool_config_list: List[PoolWalletConfig]):
    """
    Replace the ``pool.pool_list`` section of config.yaml with the given pool
    configs. The read-modify-write is done under the config file lock so that
    concurrent writers cannot corrupt the file.
    """
    with get_config_lock(root_path, "config.yaml"):
        # The lock is already held; skip re-acquisition inside load_config().
        full_config = load_config(root_path, "config.yaml", acquire_lock=False)
        full_config["pool"]["pool_list"] = [c.to_json_dict() for c in pool_config_list]
        save_config(root_path, "config.yaml", full_config)

View File

@ -1,13 +1,18 @@
import argparse
import contextlib
import logging
import os
import shutil
import sys
import tempfile
import time
import traceback
from pathlib import Path
from typing import Any, Callable, Dict, Optional, Union
import pkg_resources
import yaml
from filelock import BaseFileLock, FileLock
from typing_extensions import Literal
from chia.util.path import mkdir
@ -15,6 +20,8 @@ from chia.util.path import mkdir
PEER_DB_PATH_KEY_DEPRECATED = "peer_db_path" # replaced by "peers_file_path"
WALLET_PEERS_PATH_KEY_DEPRECATED = "wallet_peers_path" # replaced by "wallet_peers_file_path"
log = logging.getLogger(__name__)
def initial_config_file(filename: Union[str, Path]) -> str:
    """
    Return the text of the packaged default config file ``initial-<filename>``.

    Note: the resource name must interpolate ``filename`` — a literal placeholder
    here would look up a non-existent resource and raise at runtime.
    """
    return pkg_resources.resource_string(__name__, f"initial-{filename}").decode()
@ -41,24 +48,36 @@ def config_path_for_filename(root_path: Path, filename: Union[str, Path]) -> Pat
return root_path / "config" / filename
def get_config_lock(root_path: Path, filename: Union[str, Path]) -> BaseFileLock:
    """
    Return an inter-process lock guarding the given config file. The lock file
    lives next to the config file as ``<name>.lock`` (e.g. ``config.yaml.lock``).
    """
    target = config_path_for_filename(root_path, filename)
    return FileLock(target.with_name(f"{target.name}.lock"))
def save_config(root_path: Path, filename: Union[str, Path], config_data: Any):
    """
    Write ``config_data`` as YAML to the named config file.

    This must be called under an acquired config lock (see get_config_lock())
    so concurrent writers cannot interleave.
    """
    path: Path = config_path_for_filename(root_path, filename)
    # Dump to a temporary file in the same directory, then swap it into place so
    # that readers never observe a partially written file.
    with tempfile.TemporaryDirectory(dir=path.parent) as tmp_dir:
        tmp_path: Path = Path(tmp_dir) / Path(filename)
        with open(tmp_path, "w") as f:
            yaml.safe_dump(config_data, f)
        try:
            os.replace(str(tmp_path), path)
        except PermissionError:
            # os.replace() can fail with PermissionError (seen on Windows);
            # fall back to a move, which may not be atomic.
            shutil.move(str(tmp_path), str(path))
def load_config(
root_path: Path,
filename: Union[str, Path],
sub_config: Optional[str] = None,
exit_on_error=True,
exit_on_error: bool = True,
acquire_lock: bool = True,
) -> Dict:
# This must be called under an acquired config lock, or acquire_lock should be True
path = config_path_for_filename(root_path, filename)
if not path.is_file():
if not exit_on_error:
raise ValueError("Config not found")
@ -66,10 +85,26 @@ def load_config(
print("** please run `chia init` to migrate or create new config files **")
# TODO: fix this hack
sys.exit(-1)
r = yaml.safe_load(open(path, "r"))
if sub_config is not None:
r = r.get(sub_config)
return r
# This loop should not be necessary due to the config lock, but it's kept here just in case
for i in range(10):
try:
with contextlib.ExitStack() as exit_stack:
if acquire_lock:
exit_stack.enter_context(get_config_lock(root_path, filename))
with open(path, "r") as opened_config_file:
r = yaml.safe_load(opened_config_file)
if r is None:
log.error(f"yaml.safe_load returned None: {path}")
time.sleep(i * 0.1)
continue
if sub_config is not None:
r = r.get(sub_config)
return r
except Exception as e:
tb = traceback.format_exc()
log.error(f"Error loading file: {tb} {e} Retrying {i}")
time.sleep(i * 0.1)
raise RuntimeError("Was not able to read config file successfully")
def load_config_cli(root_path: Path, filename: str, sub_config: Optional[str] = None) -> Dict:

View File

@ -79,7 +79,7 @@ def get_all_ssl_file_paths(root_path: Path) -> Tuple[List[Path], List[Path]]:
# Check the Mozilla Root CAs as well
all_certs.append(Path(get_mozilla_ca_crt()))
except ValueError:
except (FileNotFoundError, ValueError):
pass
return all_certs, all_keys

View File

@ -17,7 +17,8 @@ dependencies = [
"colorlog==5.0.1", # Adds color to logs
"concurrent-log-handler==0.9.19", # Concurrently log and rotate logs
"cryptography==3.4.7", # Python cryptography library for TLS - keyring conflict
"fasteners==0.16.3", # For interprocess file locking
"fasteners==0.16.3", # For interprocess file locking, expected to be replaced by filelock
"filelock==3.4.2", # For reading and writing config multiprocess and multithread safely (non-reentrant locks)
"keyring==23.0.1", # Store keys in MacOS Keychain, Windows Credential Locker
"keyrings.cryptfile==1.3.4", # Secure storage for keys on Linux (Will be replaced)
# "keyrings.cryptfile==1.3.8", # Secure storage for keys on Linux (Will be replaced)

View File

@ -1,16 +1,28 @@
import asyncio
import copy
import shutil
import tempfile
from concurrent.futures import ProcessPoolExecutor
import pytest
import random
import yaml
from chia.util.config import create_default_chia_config, initial_config_file, load_config, save_config
from chia.util.config import (
config_path_for_filename,
create_default_chia_config,
get_config_lock,
initial_config_file,
load_config,
save_config,
)
from chia.util.path import mkdir
from multiprocessing import Pool, TimeoutError
from multiprocessing import Pool, Queue, TimeoutError
from pathlib import Path
from threading import Thread
from time import sleep
from typing import Dict
from typing import Dict, Optional
# Commented-out lines are preserved to aid in debugging the multiprocessing tests
# import logging
@ -20,47 +32,105 @@ from typing import Dict
# log = logging.getLogger(__name__)
def write_config(
    root_path: Path,
    config: Dict,
    atomic_write: bool,
    do_sleep: bool,
    iterations: int,
    error_queue: Optional[Queue] = None,
):
    """
    Write out the config data ``iterations`` times, optionally sleeping a random
    amount before each write. With a large config, we expect save_config() to
    require multiple writes. Any exception is forwarded through ``error_queue``
    (useful when running in another process/thread) and then re-raised.
    """
    try:
        for i in range(iterations):
            # This is a small sleep to get interweaving reads and writes
            sleep(0.05)
            if do_sleep:
                sleep(random.random())
            if atomic_write:
                # Note that this is usually atomic but in certain circumstances in Windows it can copy the file,
                # leading to a non-atomic operation.
                with get_config_lock(root_path, "config.yaml"):
                    save_config(root_path=root_path, filename="config.yaml", config_data=config)
            else:
                # Deliberately non-atomic write path: copy over the live file so
                # partially written states are observable to unlocked readers.
                path: Path = config_path_for_filename(root_path, filename="config.yaml")
                with get_config_lock(root_path, "config.yaml"):
                    with tempfile.TemporaryDirectory(dir=path.parent) as tmp_dir:
                        tmp_path: Path = Path(tmp_dir) / Path("config.yaml")
                        with open(tmp_path, "w") as f:
                            yaml.safe_dump(config, f)
                        shutil.copy2(str(tmp_path), str(path))
    except Exception as e:
        if error_queue is not None:
            error_queue.put(e)
        raise
def read_and_compare_config(
    root_path: Path, default_config: Dict, do_sleep: bool, iterations: int, error_queue: Optional[Queue] = None
):
    """
    Read the config ``iterations`` times and compare each result with the
    default config data. If the config file is partially-written or corrupt,
    load_config should fail or return bad data. Any exception is forwarded
    through ``error_queue`` and then re-raised.
    """
    try:
        for i in range(iterations):
            # This is a small sleep to get interweaving reads and writes
            sleep(0.05)
            # Wait a moment. The read and write threads are delayed by a random amount
            # in an attempt to interleave their execution.
            if do_sleep:
                sleep(random.random())
            with get_config_lock(root_path, "config.yaml"):
                # The lock is already held; skip re-acquisition inside load_config().
                config: Dict = load_config(root_path=root_path, filename="config.yaml", acquire_lock=False)
                assert config == default_config
    except Exception as e:
        if error_queue is not None:
            error_queue.put(e)
        raise
async def create_reader_and_writer_tasks(root_path: Path, default_config: Dict):
    """
    Spin-off reader and writer threads and wait for completion. The first error
    either thread reported via the shared queue is re-raised here so the test
    fails in the main thread.
    """
    error_queue: Queue = Queue()
    thread1 = Thread(
        target=write_config,
        kwargs={
            "root_path": root_path,
            "config": default_config,
            "atomic_write": False,
            "do_sleep": True,
            "iterations": 1,
            "error_queue": error_queue,
        },
    )
    thread2 = Thread(
        target=read_and_compare_config,
        kwargs={
            "root_path": root_path,
            "default_config": default_config,
            "do_sleep": True,
            "iterations": 1,
            "error_queue": error_queue,
        },
    )
    thread1.start()
    thread2.start()
    thread1.join()
    thread2.join()
    if not error_queue.empty():
        raise error_queue.get()
def run_reader_and_writer_tasks(root_path: Path, default_config: Dict):
@ -197,8 +267,6 @@ class TestConfig:
"""
Test whether multiple readers/writers encounter data corruption. When using non-atomic operations
to write to the config, partial/incomplete writes can cause readers to yield bad/corrupt data.
Access to config.yaml isn't currently synchronized, so the best we can currently hope for is that
the file contents are written-to as a whole.
"""
# Artifically inflate the size of the default config. This is done to (hopefully) force
# save_config() to require multiple writes. When save_config() was using shutil.move()
@ -216,3 +284,30 @@ class TestConfig:
res.get(timeout=60)
except TimeoutError:
pytest.skip("Timed out waiting for reader/writer processes to complete")
    @pytest.mark.asyncio
    async def test_non_atomic_writes(self, root_path_populated_with_config, default_config_dict):
        """
        Test whether one continuous writer (writing constantly, but not atomically) will interfere with many
        concurrent readers.
        """
        # Artificially inflate the config so each save spans multiple OS-level writes.
        default_config_dict["xyz"] = "x" * 32768
        root_path: Path = root_path_populated_with_config
        save_config(root_path=root_path, filename="config.yaml", config_data=default_config_dict)
        with ProcessPoolExecutor(max_workers=4) as pool:
            all_tasks = []
            for i in range(10):
                # Reader: do_sleep=False, iterations=100, error_queue=None — reads
                # under the config lock and compares against the expected config.
                all_tasks.append(
                    asyncio.get_running_loop().run_in_executor(
                        pool, read_and_compare_config, root_path, default_config_dict, False, 100, None
                    )
                )
                if i % 2 == 0:
                    # Every other iteration also spawn a writer with
                    # atomic_write=False, do_sleep=False, iterations=100.
                    all_tasks.append(
                        asyncio.get_running_loop().run_in_executor(
                            pool, write_config, root_path, default_config_dict, False, False, 100, None
                        )
                    )
            # Propagate the first exception (if any) from the pooled workers.
            await asyncio.gather(*all_tasks)