apply pyupgrade --py38-plus --keep-runtime-typing (#14646)

Kyle Altendorf, 2023-10-24 13:01:23 -04:00, committed by GitHub
parent 493d36bb3b
commit 90c1441af0
96 changed files with 427 additions and 508 deletions


@ -13,6 +13,13 @@ repos:
entry: ./activated.py python tests/build-init-files.py -v --root .
language: system
pass_filenames: false
- repo: local
hooks:
- id: pyupgrade
name: pyupgrade
entry: ./activated.py pyupgrade --py38-plus --keep-runtime-typing
language: system
types: [python]
- repo: local
hooks:
- id: black
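For context: the new hook above runs pyupgrade over every staged Python file; --py38-plus lets it assume Python 3.8 semantics, while --keep-runtime-typing stops it from rewriting annotation syntax that is still evaluated at runtime. A minimal sketch of what the flag preserves, using a hypothetical function (assumed pyupgrade behavior, not code from this commit):

from typing import Optional, Union

def parse_port(value: Union[str, int], default: Optional[int] = None) -> int:
    # --keep-runtime-typing leaves these annotations alone; without it,
    # pyupgrade could rewrite them to `str | int` and `int | None` in files
    # using `from __future__ import annotations`, and code that inspects
    # annotations at runtime on Python 3.8 would then break.
    if value == "" and default is not None:
        return default
    return int(value)

assert parse_port("8444") == 8444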


@ -83,11 +83,11 @@ def print_row(
stdev_iterations: Union[str, float],
end: str = "\n",
) -> None:
mode = "{0:<10}".format(f"{mode}")
us_per_iteration = "{0:<12}".format(f"{us_per_iteration}")
stdev_us_per_iteration = "{0:>20}".format(f"{stdev_us_per_iteration}")
avg_iterations = "{0:>18}".format(f"{avg_iterations}")
stdev_iterations = "{0:>22}".format(f"{stdev_iterations}")
mode = "{:<10}".format(f"{mode}")
us_per_iteration = "{:<12}".format(f"{us_per_iteration}")
stdev_us_per_iteration = "{:>20}".format(f"{stdev_us_per_iteration}")
avg_iterations = "{:>18}".format(f"{avg_iterations}")
stdev_iterations = "{:>22}".format(f"{stdev_iterations}")
print(f"{mode} | {us_per_iteration} | {stdev_us_per_iteration} | {avg_iterations} | {stdev_iterations}", end=end)
@ -195,10 +195,10 @@ def pop_data(key: str, *, old: Dict[str, Any], new: Dict[str, Any]) -> Tuple[Any
def print_compare_row(c0: str, c1: Union[str, float], c2: Union[str, float], c3: Union[str, float]) -> None:
c0 = "{0:<12}".format(f"{c0}")
c1 = "{0:<16}".format(f"{c1}")
c2 = "{0:<16}".format(f"{c2}")
c3 = "{0:<12}".format(f"{c3}")
c0 = "{:<12}".format(f"{c0}")
c1 = "{:<16}".format(f"{c1}")
c2 = "{:<16}".format(f"{c2}")
c3 = "{:<12}".format(f"{c3}")
print(f"{c0} | {c1} | {c2} | {c3}")


@ -138,7 +138,7 @@ class FromDB:
def wallet_type_name(
wallet_type: int,
) -> str:
if wallet_type in set(wt.value for wt in WalletType):
if wallet_type in {wt.value for wt in WalletType}:
return f"{WalletType(wallet_type).name} ({wallet_type})"
else:
return f"INVALID_WALLET_TYPE ({wallet_type})"
@ -271,7 +271,7 @@ class WalletDBReader:
# Check for invalid wallet types in users_wallets
invalid_wallet_types = set()
for row in rows:
if row[2] not in set(wt.value for wt in WalletType):
if row[2] not in {wt.value for wt in WalletType}:
invalid_wallet_types.add(row[2])
if len(invalid_wallet_types) > 0:
errors.append(f"Invalid Wallet Types found in table users_wallets: {invalid_wallet_types}")
@ -331,7 +331,7 @@ class WalletDBReader:
wrong_type = defaultdict(list)
for d in derivation_paths:
if d.wallet_type not in set(wt.value for wt in WalletType):
if d.wallet_type not in {wt.value for wt in WalletType}:
invalid_wallet_types.append(d.wallet_type)
if d.wallet_id not in wallet_id_to_type:
missing_wallet_ids.append(d.wallet_id)
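All three hunks apply the same rewrite: set(<generator expression>) becomes a set comprehension, which builds the set directly instead of feeding a generator to the set constructor. A self-contained sketch with a stand-in enum (WalletType itself is not reproduced here; the members are made up):

from enum import IntEnum

class Color(IntEnum):  # stand-in for WalletType
    RED = 1
    BLUE = 2

values = {c.value for c in Color}
assert values == set(c.value for c in Color) == {1, 2}
assert 3 not in values  # membership tests like the ones above behave identically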


@ -463,7 +463,7 @@ async def create_spend_proposal(args: Dict[str, Any], wallet_rpc_port: Optional[
if additions_file is None and (address is None or amount is None):
raise ValueError("Must include a json specification or an address / amount pair.")
if additions_file: # pragma: no cover
with open(additions_file, "r") as f:
with open(additions_file) as f:
additions_dict = json.load(f)
additions = []
for addition in additions_dict:
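open() defaults to mode "r" (read, text), so the explicit argument is redundant; this same rewrite recurs throughout the commit. A runnable sketch with a throwaway file:

import os
import tempfile

fd, path = tempfile.mkstemp()
os.write(fd, b"[]")
os.close(fd)
with open(path) as f:  # identical to open(path, "r")
    assert f.read() == "[]"
os.remove(path)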


@ -77,10 +77,8 @@ async def challenges(farmer_rpc_port: Optional[int], limit: int) -> None:
for signage_point in signage_points:
print(
(
f"Hash: {signage_point['signage_point']['challenge_hash']} "
f"Index: {signage_point['signage_point']['signage_point_index']}"
)
f"Hash: {signage_point['signage_point']['challenge_hash']} "
f"Index: {signage_point['signage_point']['signage_point_index']}"
)
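Here pyupgrade drops a redundant pair of parentheses around print's single argument; adjacent string literals are concatenated at compile time with or without them. For example:

# Implicit concatenation happens at compile time; the extra parentheses added
# nothing when the whole expression was already one call argument:
assert ("Hash: 0xab " "Index: 1") == "Hash: 0xab Index: 1"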


@ -145,7 +145,7 @@ def check_keys(new_root: Path, keychain: Optional[Keychain] = None) -> None:
)
# Set the pool pks in the farmer
pool_pubkeys_hex = set(bytes(pk).hex() for pk in pool_child_pubkeys)
pool_pubkeys_hex = {bytes(pk).hex() for pk in pool_child_pubkeys}
if "pool_public_keys" in config["farmer"]:
for pk_hex in config["farmer"]["pool_public_keys"]:
# Add original ones in config


@ -110,8 +110,8 @@ def status_cmd(json_output: bool) -> None:
config = load_config(DEFAULT_ROOT_PATH, "config.yaml")
def print_row(c0: str, c1: str) -> None:
c0 = "{0:<12}".format(f"{c0}")
c1 = "{0:<9}".format(f"{c1}")
c0 = "{:<12}".format(f"{c0}")
c1 = "{:<9}".format(f"{c1}")
print(f"{c0}{c1}")
status_data = {}


@ -690,7 +690,7 @@ async def take_offer(
async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config):
if os.path.exists(file):
filepath = pathlib.Path(file)
with open(filepath, "r") as ffile:
with open(filepath) as ffile:
offer_hex: str = ffile.read()
ffile.close()
else:


@ -394,7 +394,7 @@ async def validate_block_body(
if len(unspent_records) != len(removals_from_db):
# some coins could not be found in the DB. We need to find out which
# ones and look for them in additions_since_fork
found: Set[bytes32] = set([u.name for u in unspent_records])
found: Set[bytes32] = {u.name for u in unspent_records}
for rem in removals_from_db:
if rem in found:
continue


@ -123,7 +123,7 @@ class Blockchain(BlockchainInterface):
multiprocessing_context: Optional[BaseContext] = None,
*,
single_threaded: bool = False,
) -> "Blockchain":
) -> Blockchain:
"""
Initializes a blockchain with the BlockRecords from disk, assuming they have all been
validated. Uses the genesis block given in override_constants, or as a fallback,
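These files use `from __future__ import annotations` (PEP 563), so annotations are stored as strings and never evaluated at definition time; the quotes that previously protected the not-yet-bound class name are unnecessary. A minimal sketch:

from __future__ import annotations

class Node:
    @classmethod
    def create(cls) -> Node:  # evaluating `Node` here would fail without PEP 563
        return cls()

assert isinstance(Node.create(), Node)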


@ -78,7 +78,7 @@ class ConsensusConstants:
PLOT_FILTER_64_HEIGHT: uint32
PLOT_FILTER_32_HEIGHT: uint32
def replace_str_to_bytes(self, **changes: Any) -> "ConsensusConstants":
def replace_str_to_bytes(self, **changes: Any) -> ConsensusConstants:
"""
Overrides str (hex) values with bytes.
"""


@ -790,7 +790,7 @@ class WebSocketServer:
async def _track_plotting_progress(self, config, loop: asyncio.AbstractEventLoop):
file_path = config["out_file"]
with open(file_path, "r") as fp:
with open(file_path) as fp:
await self._watch_file_changes(config, fp, loop)
def _common_plotting_command_args(self, request: Any, ignoreCount: bool) -> List[str]:
@ -1079,7 +1079,7 @@ class WebSocketServer:
self._post_process_plotting_job(config)
except (subprocess.SubprocessError, IOError):
except (subprocess.SubprocessError, OSError):
log.exception(f"problem starting {service_name}") # lgtm [py/clear-text-logging-sensitive-data]
error = Exception("Start plotting failed")
config["state"] = PlotState.FINISHED
@ -1240,7 +1240,7 @@ class WebSocketServer:
process, pid_path = launch_service(self.root_path, exe_command)
self.services[service_command] = [process]
success = True
except (subprocess.SubprocessError, IOError):
except (subprocess.SubprocessError, OSError):
log.exception(f"problem starting {service_command}")
error = "start failed"


@ -712,7 +712,7 @@ class DataLayer:
# Subscribe to all local tree_ids that we can find on chain.
local_tree_ids = await self.data_store.get_tree_ids()
subscription_tree_ids = set(subscription.tree_id for subscription in subscriptions)
subscription_tree_ids = {subscription.tree_id for subscription in subscriptions}
for local_id in local_tree_ids:
if local_id not in subscription_tree_ids:
try:


@ -92,7 +92,7 @@ class DataLayerServer:
content = reader.read()
response = web.Response(
content_type="application/octet-stream",
headers={"Content-Disposition": "attachment;filename={}".format(filename)},
headers={"Content-Disposition": f"attachment;filename={filename}"},
body=content,
)
return response


@ -112,7 +112,7 @@ class Side(IntEnum):
LEFT = 0
RIGHT = 1
def other(self) -> "Side":
def other(self) -> Side:
if self == Side.LEFT:
return Side.RIGHT
@ -154,7 +154,7 @@ class TerminalNode:
return Program.to(self.key), Program.to(self.value)
@classmethod
def from_row(cls, row: aiosqlite.Row) -> "TerminalNode":
def from_row(cls, row: aiosqlite.Row) -> TerminalNode:
return cls(
hash=bytes32(row["hash"]),
# generation=row["generation"],
@ -173,9 +173,9 @@ class ProofOfInclusionLayer:
@classmethod
def from_internal_node(
cls,
internal_node: "InternalNode",
internal_node: InternalNode,
traversal_child_hash: bytes32,
) -> "ProofOfInclusionLayer":
) -> ProofOfInclusionLayer:
return ProofOfInclusionLayer(
other_hash_side=internal_node.other_child_side(hash=traversal_child_hash),
other_hash=internal_node.other_child_hash(hash=traversal_child_hash),
@ -183,7 +183,7 @@ class ProofOfInclusionLayer:
)
@classmethod
def from_hashes(cls, primary_hash: bytes32, other_hash_side: Side, other_hash: bytes32) -> "ProofOfInclusionLayer":
def from_hashes(cls, primary_hash: bytes32, other_hash_side: Side, other_hash: bytes32) -> ProofOfInclusionLayer:
combined_hash = calculate_internal_hash(
hash=primary_hash,
other_hash_side=other_hash_side,
@ -250,7 +250,7 @@ class InternalNode:
atom: None = None
@classmethod
def from_row(cls, row: aiosqlite.Row) -> "InternalNode":
def from_row(cls, row: aiosqlite.Row) -> InternalNode:
return cls(
hash=bytes32(row["hash"]),
# generation=row["generation"],
@ -285,7 +285,7 @@ class Root:
status: Status
@classmethod
def from_row(cls, row: aiosqlite.Row) -> "Root":
def from_row(cls, row: aiosqlite.Row) -> Root:
raw_node_hash = row["node_hash"]
if raw_node_hash is None:
node_hash = None
@ -308,7 +308,7 @@ class Root:
}
@classmethod
def unmarshal(cls, marshalled: Dict[str, Any]) -> "Root":
def unmarshal(cls, marshalled: Dict[str, Any]) -> Root:
return cls(
tree_id=bytes32.from_hexstr(marshalled["tree_id"]),
node_hash=None if marshalled["node_hash"] is None else bytes32.from_hexstr(marshalled["node_hash"]),


@ -95,7 +95,7 @@ class Mirror:
}
@classmethod
def from_json_dict(cls, json_dict: Dict[str, Any]) -> "Mirror":
def from_json_dict(cls, json_dict: Dict[str, Any]) -> Mirror:
return cls(
bytes32.from_hexstr(json_dict["coin_id"]),
bytes32.from_hexstr(json_dict["launcher_id"]),
@ -505,7 +505,7 @@ class DataLayerWallet:
)
root_announce = Announcement(second_full_puz.get_tree_hash(), b"$")
if puzzle_announcements_to_consume is None:
puzzle_announcements_to_consume = set((root_announce,))
puzzle_announcements_to_consume = {root_announce}
else:
puzzle_announcements_to_consume.add(root_announce)
second_singleton_record = SingletonRecord(
@ -1160,7 +1160,7 @@ class DataLayerWallet:
).get_tree_hash_precalc(record.inner_puzzle_hash)
assert record.lineage_proof.parent_name is not None
assert record.lineage_proof.amount is not None
return set([Coin(record.lineage_proof.parent_name, puzhash, record.lineage_proof.amount)])
return {Coin(record.lineage_proof.parent_name, puzhash, record.lineage_proof.amount)}
@staticmethod
async def make_update_offer(


@ -50,7 +50,7 @@ class DataStore:
@classmethod
async def create(
cls, database: Union[str, Path], uri: bool = False, sql_log_path: Optional[Path] = None
) -> "DataStore":
) -> DataStore:
db_wrapper = await DBWrapper2.create(
database=database,
uri=uri,
@ -471,7 +471,7 @@ class DataStore:
if len(bad_node_hashes) > 0:
raise NodeHashError(node_hashes=bad_node_hashes)
_checks: Tuple[Callable[["DataStore"], Awaitable[None]], ...] = (
_checks: Tuple[Callable[[DataStore], Awaitable[None]], ...] = (
_check_roots_are_incrementing,
_check_hashes,
)
@ -1190,7 +1190,7 @@ class DataStore:
tree_id=tree_id,
root_hash=previous_root.node_hash,
)
known_hashes: Set[bytes32] = set(node.hash for node in previous_internal_nodes)
known_hashes: Set[bytes32] = {node.hash for node in previous_internal_nodes}
else:
known_hashes = set()
internal_nodes: List[InternalNode] = await self.get_internal_nodes(
@ -1418,7 +1418,7 @@ class DataStore:
)
old_urls = set()
if old_subscription is not None:
old_urls = set(server_info.url for server_info in old_subscription.servers_info)
old_urls = {server_info.url for server_info in old_subscription.servers_info}
new_servers = [server_info for server_info in subscription.servers_info if server_info.url not in old_urls]
for server_info in new_servers:
await writer.execute(
@ -1556,14 +1556,14 @@ class DataStore:
return set()
if len(new_pairs) == 0 and hash_2 != bytes32([0] * 32):
return set()
insertions = set(
insertions = {
DiffData(type=OperationType.INSERT, key=node.key, value=node.value)
for node in new_pairs
if node not in old_pairs
)
deletions = set(
}
deletions = {
DiffData(type=OperationType.DELETE, key=node.key, value=node.value)
for node in old_pairs
if node not in new_pairs
)
}
return set.union(insertions, deletions)


@ -55,29 +55,25 @@ class DataLayerStore:
async with self.db_wrapper.writer_maybe_transaction() as conn:
await conn.execute(
(
"CREATE TABLE IF NOT EXISTS singleton_records("
"coin_id blob PRIMARY KEY,"
" launcher_id blob,"
" root blob,"
" inner_puzzle_hash blob,"
" confirmed tinyint,"
" confirmed_at_height int,"
" proof blob,"
" generation int," # This first singleton will be 0, then 1, and so on. This is handled by the DB.
" timestamp int)"
)
"CREATE TABLE IF NOT EXISTS singleton_records("
"coin_id blob PRIMARY KEY,"
" launcher_id blob,"
" root blob,"
" inner_puzzle_hash blob,"
" confirmed tinyint,"
" confirmed_at_height int,"
" proof blob,"
" generation int," # This first singleton will be 0, then 1, and so on. This is handled by the DB.
" timestamp int)"
)
await conn.execute(
(
"CREATE TABLE IF NOT EXISTS mirrors("
"coin_id blob PRIMARY KEY,"
"launcher_id blob,"
"amount blob,"
"urls blob,"
"ours tinyint)"
)
"CREATE TABLE IF NOT EXISTS mirrors("
"coin_id blob PRIMARY KEY,"
"launcher_id blob,"
"amount blob,"
"urls blob,"
"ours tinyint)"
)
await conn.execute(


@ -235,13 +235,13 @@ async def http_download(
size = int(resp.headers.get("content-length", 0))
log.debug(f"Downloading delta file {filename}. Size {size} bytes.")
progress_byte = 0
progress_percentage = "{:.0%}".format(0)
progress_percentage = f"{0:.0%}"
target_filename = client_folder.joinpath(filename)
with target_filename.open(mode="wb") as f:
async for chunk, _ in resp.content.iter_chunks():
f.write(chunk)
progress_byte += len(chunk)
new_percentage = "{:.0%}".format(progress_byte / size)
new_percentage = f"{progress_byte / size:.0%}"
if new_percentage != progress_percentage:
progress_percentage = new_percentage
log.info(f"Downloading delta file {filename}. {progress_percentage} of {size} bytes.")


@ -302,7 +302,7 @@ class S3Plugin:
self.stores = read_store_ids_from_config(config)
def update_config(self) -> None:
with open("s3_plugin_config.yml", "r") as file:
with open("s3_plugin_config.yml") as file:
full_config = yaml.safe_load(file)
full_config[self.instance_name]["stores"] = [store.marshal() for store in self.stores]
@ -387,7 +387,7 @@ def make_app(config: Dict[str, Any], instance_name: str) -> web.Application:
def load_config(instance: str) -> Any:
with open("s3_plugin_config.yml", "r") as f:
with open("s3_plugin_config.yml") as f:
full_config = yaml.safe_load(f)
return full_config[instance]


@ -56,7 +56,7 @@ class BlockHeightMap:
__ses_filename: Path
@classmethod
async def create(cls, blockchain_dir: Path, db: DBWrapper2) -> "BlockHeightMap":
async def create(cls, blockchain_dir: Path, db: DBWrapper2) -> BlockHeightMap:
if db.db_version != 2:
raise RuntimeError(f"BlockHeightMap does not support database schema v{db.db_version}")
self = BlockHeightMap()


@ -39,7 +39,7 @@ class SyncStore:
batch_syncing: Set[bytes32] = field(default_factory=set)
# Set of nodes which we are backtrack syncing from, and how many threads
_backtrack_syncing: collections.defaultdict[bytes32, int] = field(
default_factory=lambda: collections.defaultdict(lambda: 0),
default_factory=lambda: collections.defaultdict(int),
)
def set_sync_mode(self, sync_mode: bool) -> None:
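Calling int() with no arguments returns 0, so it can serve directly as the defaultdict factory; the lambda wrapped it for no benefit. For example:

from collections import defaultdict

counts = defaultdict(int)
counts["seen"] += 1
assert counts["seen"] == 1 and counts["missing"] == 0 and int() == 0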


@ -34,7 +34,7 @@ class PathListDelta(DeltaType):
removals: List[str] = field(default_factory=list)
@staticmethod
def from_lists(old: List[str], new: List[str]) -> "PathListDelta":
def from_lists(old: List[str], new: List[str]) -> PathListDelta:
return PathListDelta([x for x in new if x not in old], [x for x in old if x not in new])


@ -51,7 +51,7 @@ class CacheEntry:
last_use: float
@classmethod
def from_disk_prover(cls, prover: DiskProver) -> "CacheEntry":
def from_disk_prover(cls, prover: DiskProver) -> CacheEntry:
(
pool_public_key_or_puzzle_hash,
farmer_public_key,


@ -94,7 +94,7 @@ def create_pool_state(
pool_url: Optional[str],
relative_lock_height: uint32,
) -> PoolState:
if state not in set(s.value for s in PoolSingletonState):
if state not in {s.value for s in PoolSingletonState}:
raise AssertionError(f"state {state} is not a valid PoolSingletonState")
ps = PoolState(
POOL_PROTOCOL_VERSION, uint8(state), target_puzzle_hash, owner_pubkey, pool_url, relative_lock_height


@ -1185,10 +1185,8 @@ class WalletRpcApi:
coins[coin_record.coin] = metadata
if len(coins) >= batch_size:
tx_id_list.extend(
(
await self.service.wallet_state_manager.spend_clawback_coins(
coins, tx_fee, tx_config, request.get("force", False), extra_conditions=extra_conditions
)
await self.service.wallet_state_manager.spend_clawback_coins(
coins, tx_fee, tx_config, request.get("force", False), extra_conditions=extra_conditions
)
)
coins = {}
@ -1196,10 +1194,8 @@ class WalletRpcApi:
log.error(f"Failed to spend clawback coin {coin_id.hex()}: %s", e)
if len(coins) > 0:
tx_id_list.extend(
(
await self.service.wallet_state_manager.spend_clawback_coins(
coins, tx_fee, tx_config, request.get("force", False), extra_conditions=extra_conditions
)
await self.service.wallet_state_manager.spend_clawback_coins(
coins, tx_fee, tx_config, request.get("force", False), extra_conditions=extra_conditions
)
)
return {
@ -1676,7 +1672,7 @@ class WalletRpcApi:
memos.append([mem.encode("utf-8") for mem in request["memos"]])
coins: Optional[Set[Coin]] = None
if "coins" in request and len(request["coins"]) > 0:
coins = set([Coin.from_json_dict(coin_json) for coin_json in request["coins"]])
coins = {Coin.from_json_dict(coin_json) for coin_json in request["coins"]}
fee: uint64 = uint64(request.get("fee", 0))
cat_discrepancy_params: Tuple[Optional[int], Optional[str], Optional[str]] = (
@ -2138,10 +2134,10 @@ class WalletRpcApi:
) -> EndpointResult:
wallet_id = uint32(request["wallet_id"])
wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DIDWallet)
coin_announcements: Set[bytes] = set([])
coin_announcements: Set[bytes] = set()
for ca in request.get("coin_announcements", []):
coin_announcements.add(bytes.fromhex(ca))
puzzle_announcements: Set[bytes] = set([])
puzzle_announcements: Set[bytes] = set()
for pa in request.get("puzzle_announcements", []):
puzzle_announcements.add(bytes.fromhex(pa))
@ -2992,7 +2988,7 @@ class WalletRpcApi:
did_id = request.get("did_id", None)
if did_id is not None:
if did_id == "":
did_id = bytes()
did_id = b""
else:
did_id = decode_puzzle_hash(did_id)
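Two empty-constructor cleanups appear in the hunks above: set([]) builds and discards a list before producing an empty set, and bytes() is a longer spelling of the empty bytes literal. Both pairs are equal in value:

assert set([]) == set() and len(set()) == 0   # no throwaway list needed
assert bytes() == b"" == bytes(0)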
@ -3535,7 +3531,7 @@ class WalletRpcApi:
xch_coin_list = request.get("xch_coins", None)
xch_coins = None
if xch_coin_list:
xch_coins = set([Coin.from_json_dict(xch_coin) for xch_coin in xch_coin_list])
xch_coins = {Coin.from_json_dict(xch_coin) for xch_coin in xch_coin_list}
xch_change_target = request.get("xch_change_target", None)
if xch_change_target is not None:
if xch_change_target[:2] == "xch":
@ -3729,7 +3725,7 @@ class WalletRpcApi:
coins = None
if "coins" in request and len(request["coins"]) > 0:
coins = set([Coin.from_json_dict(coin_json) for coin_json in request["coins"]])
coins = {Coin.from_json_dict(coin_json) for coin_json in request["coins"]}
coin_announcements: Optional[Set[Announcement]] = None
if (


@ -30,34 +30,30 @@ class CrawlStore:
self = cls(connection)
await self.crawl_db.execute(
(
"CREATE TABLE IF NOT EXISTS peer_records("
" peer_id text PRIMARY KEY,"
" ip_address text,"
" port bigint,"
" connected int,"
" last_try_timestamp bigint,"
" try_count bigint,"
" connected_timestamp bigint,"
" added_timestamp bigint,"
" best_timestamp bigint,"
" version text,"
" handshake_time text"
" tls_version text)"
)
"CREATE TABLE IF NOT EXISTS peer_records("
" peer_id text PRIMARY KEY,"
" ip_address text,"
" port bigint,"
" connected int,"
" last_try_timestamp bigint,"
" try_count bigint,"
" connected_timestamp bigint,"
" added_timestamp bigint,"
" best_timestamp bigint,"
" version text,"
" handshake_time text"
" tls_version text)"
)
await self.crawl_db.execute(
(
"CREATE TABLE IF NOT EXISTS peer_reliability("
" peer_id text PRIMARY KEY,"
" ignore_till int, ban_till int,"
" stat_2h_w real, stat_2h_c real, stat_2h_r real,"
" stat_8h_w real, stat_8h_c real, stat_8h_r real,"
" stat_1d_w real, stat_1d_c real, stat_1d_r real,"
" stat_1w_w real, stat_1w_c real, stat_1w_r real,"
" stat_1m_w real, stat_1m_c real, stat_1m_r real,"
" tries int, successes int)"
)
"CREATE TABLE IF NOT EXISTS peer_reliability("
" peer_id text PRIMARY KEY,"
" ignore_till int, ban_till int,"
" stat_2h_w real, stat_2h_c real, stat_2h_r real,"
" stat_8h_w real, stat_8h_c real, stat_8h_r real,"
" stat_1d_w real, stat_1d_c real, stat_1d_r real,"
" stat_1w_w real, stat_1w_c real, stat_1w_r real,"
" stat_1m_w real, stat_1m_c real, stat_1m_r real,"
" tries int, successes int)"
)
try:
@ -65,7 +61,7 @@ class CrawlStore:
except aiosqlite.OperationalError:
pass # ignore what is likely Duplicate column error
await self.crawl_db.execute(("CREATE TABLE IF NOT EXISTS good_peers(ip text)"))
await self.crawl_db.execute("CREATE TABLE IF NOT EXISTS good_peers(ip text)")
await self.crawl_db.execute("CREATE INDEX IF NOT EXISTS ip_address on peer_records(ip_address)")


@ -48,7 +48,7 @@ class Crawler:
minimum_version_count: int = 0
peers_retrieved: List[RespondPeers] = field(default_factory=list)
host_to_version: Dict[str, str] = field(default_factory=dict)
versions: Dict[str, int] = field(default_factory=lambda: defaultdict(lambda: 0))
versions: Dict[str, int] = field(default_factory=lambda: defaultdict(int))
version_cache: List[Tuple[str, str]] = field(default_factory=list)
handshake_time: Dict[str, uint64] = field(default_factory=dict)
best_timestamp_per_peer: Dict[str, uint64] = field(default_factory=dict)
@ -256,7 +256,7 @@ class Crawler:
for host, timestamp in self.best_timestamp_per_peer.items()
if timestamp >= now - 5 * 24 * 3600
}
self.versions = defaultdict(lambda: 0)
self.versions = defaultdict(int)
for host, version in self.host_to_version.items():
self.versions[version] += 1


@ -268,8 +268,11 @@ if sys.platform == "win32":
try:
return await self._chia_accept(listener)
except WindowsError as exc: # pylint: disable=E0602
if exc.winerror not in (_winapi.ERROR_NETNAME_DELETED, _winapi.ERROR_OPERATION_ABORTED):
except OSError as exc:
if exc.winerror not in ( # pylint: disable=E1101
_winapi.ERROR_NETNAME_DELETED,
_winapi.ERROR_OPERATION_ABORTED,
):
raise
def _chia_accept(self, listener: socket.socket) -> asyncio.Future[Tuple[socket.socket, Tuple[object, ...]]]:
@ -295,9 +298,12 @@ if sys.platform == "win32":
except asyncio.CancelledError:
conn.close()
raise
except WindowsError as exc: # pylint: disable=E0602
except OSError as exc:
# https://github.com/python/cpython/issues/93821#issuecomment-1157945855
if exc.winerror not in (_winapi.ERROR_NETNAME_DELETED, _winapi.ERROR_OPERATION_ABORTED):
if exc.winerror not in ( # pylint: disable=E1101
_winapi.ERROR_NETNAME_DELETED,
_winapi.ERROR_OPERATION_ABORTED,
):
raise
future = self._register(ov, listener, finish_accept) # pylint: disable=assignment-from-no-return
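WindowsError has likewise been an alias of OSError since Python 3.3 and does not exist at all on non-Windows builds, hence the old pylint suppression; catching OSError and checking the Windows-only winerror attribute is the portable spelling. A sketch (error codes 64 and 995 are assumed to match _winapi.ERROR_NETNAME_DELETED and _winapi.ERROR_OPERATION_ABORTED):

import sys

try:
    # the fourth argument populates .winerror on Windows and is ignored elsewhere
    raise OSError(0, "operation aborted", None, 995)
except OSError as exc:
    if sys.platform == "win32" and exc.winerror not in (64, 995):
        raise  # unexpected code; 64/995 are the expected disconnect/abort cases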


@ -60,18 +60,16 @@ def ssl_context_for_server(
ssl_context.check_hostname = False
ssl_context.minimum_version = ssl.TLSVersion.TLSv1_2
ssl_context.set_ciphers(
(
"ECDHE-ECDSA-AES256-GCM-SHA384:"
"ECDHE-RSA-AES256-GCM-SHA384:"
"ECDHE-ECDSA-CHACHA20-POLY1305:"
"ECDHE-RSA-CHACHA20-POLY1305:"
"ECDHE-ECDSA-AES128-GCM-SHA256:"
"ECDHE-RSA-AES128-GCM-SHA256:"
"ECDHE-ECDSA-AES256-SHA384:"
"ECDHE-RSA-AES256-SHA384:"
"ECDHE-ECDSA-AES128-SHA256:"
"ECDHE-RSA-AES128-SHA256"
)
"ECDHE-ECDSA-AES256-GCM-SHA384:"
"ECDHE-RSA-AES256-GCM-SHA384:"
"ECDHE-ECDSA-CHACHA20-POLY1305:"
"ECDHE-RSA-CHACHA20-POLY1305:"
"ECDHE-ECDSA-AES128-GCM-SHA256:"
"ECDHE-RSA-AES128-GCM-SHA256:"
"ECDHE-ECDSA-AES256-SHA384:"
"ECDHE-RSA-AES256-SHA384:"
"ECDHE-ECDSA-AES128-SHA256:"
"ECDHE-RSA-AES128-SHA256"
)
ssl_context.load_cert_chain(certfile=str(cert_path), keyfile=str(key_path))
ssl_context.verify_mode = ssl.CERT_REQUIRED


@ -63,7 +63,7 @@ class SimulatorFullNodeRpcApi(FullNodeRpcApi):
return {"puzzle_hash": self.simulator_api.bt.farmer_ph.hex()}
async def get_all_coins(self, _request: Dict[str, object]) -> EndpointResult:
p_request = GetAllCoinsProtocol(bool((_request.get("include_spent_coins", False))))
p_request = GetAllCoinsProtocol(bool(_request.get("include_spent_coins", False)))
result: List[CoinRecord] = await self.simulator_api.get_all_coins(p_request)
return {"coin_records": [coin_record.to_json_dict() for coin_record in result]}


@ -24,7 +24,7 @@ class ClassgroupElement(Streamable):
return ClassgroupElement(bytes100(data))
@staticmethod
def get_default_element() -> "ClassgroupElement":
def get_default_element() -> ClassgroupElement:
# Bit 3 in the first byte of serialized compressed form indicates if
# it's the default generator element.
return ClassgroupElement.create(b"\x08")


@ -24,7 +24,7 @@ class Program(SExp):
"""
@classmethod
def parse(cls, f) -> "Program":
def parse(cls, f) -> Program:
return sexp_from_stream(f, cls.to)
def stream(self, f):
@ -45,7 +45,7 @@ class Program(SExp):
return Program.to(ret)
@classmethod
def fromhex(cls, hexstr: str) -> "Program":
def fromhex(cls, hexstr: str) -> Program:
return cls.from_bytes(hexstr_to_bytes(hexstr))
def __bytes__(self) -> bytes:
@ -56,7 +56,7 @@ class Program(SExp):
def __str__(self) -> str:
return bytes(self).hex()
def at(self, position: str) -> "Program":
def at(self, position: str) -> Program:
"""
Take a string of only `f` and `r` characters and follow the corresponding path.
@ -75,7 +75,7 @@ class Program(SExp):
raise ValueError(f"`at` got illegal character `{c}`. Only `f` & `r` allowed")
return v
def replace(self, **kwargs) -> "Program":
def replace(self, **kwargs) -> Program:
"""
Create a new program replacing the given paths (using `at` syntax).
Example:
@ -108,15 +108,15 @@ class Program(SExp):
def get_tree_hash(self) -> bytes32:
return bytes32(tree_hash(bytes(self)))
def _run(self, max_cost: int, flags: int, args) -> Tuple[int, "Program"]:
def _run(self, max_cost: int, flags: int, args) -> Tuple[int, Program]:
prog_args = Program.to(args)
cost, r = run_chia_program(self.as_bin(), prog_args.as_bin(), max_cost, flags)
return cost, Program.to(r)
def run_with_cost(self, max_cost: int, args) -> Tuple[int, "Program"]:
def run_with_cost(self, max_cost: int, args) -> Tuple[int, Program]:
return self._run(max_cost, 0, args)
def run(self, args) -> "Program":
def run(self, args) -> Program:
cost, r = self.run_with_cost(INFINITE_COST, args)
return r
@ -136,7 +136,7 @@ class Program(SExp):
#
# Resulting in a function which places its own arguments after those
# curried in in the form of a proper list.
def curry(self, *args) -> "Program":
def curry(self, *args) -> Program:
fixed_args: Any = 1
for arg in reversed(args):
fixed_args = [4, (1, arg), fixed_args]


@ -66,7 +66,7 @@ class SerializedProgram:
return bytes(self).hex()
def __repr__(self) -> str:
return "%s(%s)" % (self.__class__.__name__, str(self))
return f"{self.__class__.__name__}({str(self)})"
def __eq__(self, other: object) -> bool:
if not isinstance(other, SerializedProgram):


@ -2,9 +2,7 @@ from __future__ import annotations
import random
import secrets
from typing import BinaryIO, Iterable, Optional, SupportsBytes, Type, TypeVar, Union
from typing_extensions import SupportsIndex
from typing import BinaryIO, Iterable, Optional, SupportsBytes, SupportsIndex, Type, TypeVar, Union
_T_SizedBytes = TypeVar("_T_SizedBytes", bound="SizedBytes")
@ -36,7 +34,7 @@ class SizedBytes(bytes):
# created instance satisfies the length limitation of the particular subclass.
super().__init__()
if len(self) != self._size:
raise ValueError("bad %s initializer %s" % (type(self).__name__, v))
raise ValueError(f"bad {type(self).__name__} initializer {v}")
@classmethod
def parse(cls: Type[_T_SizedBytes], f: BinaryIO) -> _T_SizedBytes:
@ -73,4 +71,4 @@ class SizedBytes(bytes):
return self.hex()
def __repr__(self) -> str:
return "<%s: %s>" % (self.__class__.__name__, str(self))
return f"<{self.__class__.__name__}: {str(self)}>"


@ -133,7 +133,7 @@ def _load_config_maybe_locked(
with contextlib.ExitStack() as exit_stack:
if acquire_lock:
exit_stack.enter_context(lock_config(root_path, filename))
with open(path, "r") as opened_config_file:
with open(path) as opened_config_file:
r = yaml.safe_load(opened_config_file)
if r is None:
log.error(f"yaml.safe_load returned None: {path}")


@ -125,7 +125,7 @@ class FileKeyringContent:
@classmethod
def create_from_path(cls, path: Path) -> FileKeyringContent:
loaded_dict = dict(yaml.safe_load(open(path, "r")))
loaded_dict = dict(yaml.safe_load(open(path)))
version = int(loaded_dict["version"])
if version > MAX_SUPPORTED_VERSION:


@ -101,7 +101,7 @@ class Node(metaclass=ABCMeta):
pass
@abstractmethod
def add(self, toadd: bytes, depth: int) -> "Node":
def add(self, toadd: bytes, depth: int) -> Node:
pass
@abstractmethod


@ -185,7 +185,7 @@ async def resolve(host: str, *, prefer_ipv6: bool = False) -> IPAddress:
except ValueError:
pass
addrset: List[
Tuple["socket.AddressFamily", "socket.SocketKind", int, str, Union[Tuple[str, int], Tuple[str, int, int, int]]]
Tuple[socket.AddressFamily, socket.SocketKind, int, str, Union[Tuple[str, int], Tuple[str, int, int, int]]]
] = await asyncio.get_event_loop().getaddrinfo(host, None)
# The list returned by getaddrinfo is never empty, an exception is thrown or data is returned.
ips_v4 = []


@ -39,8 +39,7 @@ def all_groups() -> KeysView[str]:
def services_for_groups(groups: Iterable[str]) -> Generator[str, None, None]:
for group in groups:
for service in SERVICES_FOR_GROUP[group]:
yield service
yield from SERVICES_FOR_GROUP[group]
def validate_service(service: str) -> bool:
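yield from delegates the whole inner iterable in one statement, replacing the two-line for/yield loop above. A small sketch (the mapping is made up):

groups_to_services = {"farmer": ["chia_harvester", "chia_farmer"]}  # stand-in

def services(groups):
    for group in groups:
        yield from groups_to_services[group]

assert list(services(["farmer"])) == ["chia_harvester", "chia_farmer"]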


@ -7,6 +7,7 @@ import pprint
import traceback
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
BinaryIO,
Callable,
@ -23,7 +24,7 @@ from typing import (
)
from blspy import G1Element, G2Element, PrivateKey
from typing_extensions import TYPE_CHECKING, Literal, get_args, get_origin
from typing_extensions import Literal, get_args, get_origin
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.byte_types import hexstr_to_bytes
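TYPE_CHECKING has been importable from typing itself since Python 3.5.3; it is False at runtime and treated as True by static checkers, which makes it a guard for checking-only imports. A minimal sketch:

from typing import TYPE_CHECKING

assert TYPE_CHECKING is False  # runtime value; type checkers assume True
if TYPE_CHECKING:
    from decimal import Decimal  # resolved only during static analysis

def double(value: "Decimal") -> "Decimal":
    return value * 2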


@ -1,8 +1,8 @@
from __future__ import annotations
from typing import BinaryIO, ClassVar, SupportsInt, Type, TypeVar, Union
from typing import BinaryIO, ClassVar, SupportsIndex, SupportsInt, Type, TypeVar, Union
from typing_extensions import Protocol, SupportsIndex
from typing_extensions import Protocol
_T_StructStream = TypeVar("_T_StructStream", bound="StructStream")
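Similarly, SupportsIndex moved into typing in Python 3.8, which is what makes this import relocation valid under --py38-plus. A small sketch of the protocol in use:

from typing import SupportsIndex

def element(data: bytes, position: SupportsIndex) -> int:
    # any object implementing __index__ works here, including plain ints
    return data[position]

assert element(b"\x01\x02", 1) == 2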


@ -89,7 +89,7 @@ class CATWallet:
tx_config: TXConfig,
fee: uint64 = uint64(0),
name: Optional[str] = None,
) -> "CATWallet":
) -> CATWallet:
self = CATWallet()
self.standard_wallet = wallet
self.log = logging.getLogger(__name__)


@ -297,7 +297,7 @@ class DAOCATWallet:
)
]
message = Program.to([proposal_id, vote_amount, is_yes_vote, coin.name()]).get_tree_hash()
puzzle_announcements = set([message])
puzzle_announcements = {message}
inner_solution = self.standard_wallet.make_solution(
primaries=primaries, puzzle_announcements=puzzle_announcements
)
@ -320,7 +320,7 @@ class DAOCATWallet:
)
)
message = Program.to([proposal_id, vote_amount, is_yes_vote, coin.name()]).get_tree_hash()
puzzle_announcements = set([message])
puzzle_announcements = {message}
inner_solution = self.standard_wallet.make_solution(
primaries=primaries, puzzle_announcements=puzzle_announcements
)


@ -21,14 +21,12 @@ class CATLineageStore:
table_name: str
@classmethod
async def create(cls, db_wrapper: DBWrapper2, asset_id: str) -> "CATLineageStore":
async def create(cls, db_wrapper: DBWrapper2, asset_id: str) -> CATLineageStore:
self = cls()
self.table_name = f"lineage_proofs_{asset_id}"
self.db_wrapper = db_wrapper
async with self.db_wrapper.writer_maybe_transaction() as conn:
await conn.execute(
(f"CREATE TABLE IF NOT EXISTS {self.table_name}(coin_id text PRIMARY KEY, lineage blob)")
)
await conn.execute(f"CREATE TABLE IF NOT EXISTS {self.table_name}(coin_id text PRIMARY KEY, lineage blob)")
return self
async def add_lineage_proof(self, coin_id: bytes32, lineage: LineageProof) -> None:


@ -768,7 +768,7 @@ class NFTWallet:
innersol: Program = self.standard_wallet.make_solution(
primaries=payments,
coin_announcements=None if announcement_to_make is None else set((announcement_to_make,)),
coin_announcements=None if announcement_to_make is None else {announcement_to_make},
coin_announcements_to_assert=coin_announcements_bytes,
puzzle_announcements_to_assert=puzzle_announcements_bytes,
conditions=extra_conditions,


@ -30,7 +30,7 @@ class NotificationStore:
@classmethod
async def create(
cls, db_wrapper: DBWrapper2, cache_size: uint32 = uint32(600000), name: Optional[str] = None
) -> "NotificationStore":
) -> NotificationStore:
self = cls()
if name:


@ -25,7 +25,7 @@ class Payment:
return self.as_condition().get_tree_hash()
@classmethod
def from_condition(cls, condition: Program) -> "Payment":
def from_condition(cls, condition: Program) -> Payment:
python_condition: List = condition.as_python()
puzzle_hash, amount = python_condition[1:3]
memos: List[bytes] = []


@ -66,11 +66,11 @@ def compile_clvm_in_lock(full_path: pathlib.Path, output: pathlib.Path, search_p
rs256 = sha256file(output)
if orig256 != rs256:
print("Compiled original %s: %s vs rust %s\n" % (full_path, orig256, rs256))
print(f"Compiled original {full_path}: {orig256} vs rust {rs256}\n")
print("Aborting compilation due to mismatch with rust")
assert orig256 == rs256
else:
print("Compilation match %s: %s\n" % (full_path, orig256))
print(f"Compilation match {full_path}: {orig256}\n")
return res


@ -357,7 +357,7 @@ class TradeManager:
await self.trade_store.add_trade_record(trade, offer_name)
# We want to subscribe to the coin IDs of all coins that are not the ephemeral offer coins
offered_coins: Set[Coin] = set([value for values in offer.get_offered_coins().values() for value in values])
offered_coins: Set[Coin] = {value for values in offer.get_offered_coins().values() for value in values}
non_offer_additions: Set[Coin] = set(offer.additions()) ^ offered_coins
non_offer_removals: Set[Coin] = set(offer.removals()) ^ offered_coins
await self.wallet_state_manager.add_interested_coin_ids(


@ -59,7 +59,7 @@ class NotarizedPayment(Payment):
nonce: bytes32 = ZERO_32
@classmethod
def from_condition_and_nonce(cls, condition: Program, nonce: bytes32) -> "NotarizedPayment":
def from_condition_and_nonce(cls, condition: Program, nonce: bytes32) -> NotarizedPayment:
with_opcode: Program = Program.to((51, condition)) # Gotta do this because the super class is expecting it
p = Payment.from_condition(with_opcode)
puzzle_hash, amount, memos = tuple(p.as_condition_args())


@ -86,7 +86,7 @@ class TradeStore:
@classmethod
async def create(
cls, db_wrapper: DBWrapper2, cache_size: uint32 = uint32(600000), name: Optional[str] = None
) -> "TradeStore":
) -> TradeStore:
self = cls()
if name:
@ -99,20 +99,18 @@ class TradeStore:
async with self.db_wrapper.writer_maybe_transaction() as conn:
await conn.execute(
(
"CREATE TABLE IF NOT EXISTS trade_records("
" trade_record blob,"
" trade_id text PRIMARY KEY,"
" status int,"
" confirmed_at_index int,"
" created_at_time bigint,"
" sent int,"
" is_my_offer tinyint)"
)
"CREATE TABLE IF NOT EXISTS trade_records("
" trade_record blob,"
" trade_id text PRIMARY KEY,"
" status int,"
" confirmed_at_index int,"
" created_at_time bigint,"
" sent int,"
" is_my_offer tinyint)"
)
await conn.execute(
("CREATE TABLE IF NOT EXISTS coin_of_interest_to_trade_record(trade_id blob, coin_id blob)")
"CREATE TABLE IF NOT EXISTS coin_of_interest_to_trade_record(trade_id blob, coin_id blob)"
)
await conn.execute(
"CREATE INDEX IF NOT EXISTS coin_to_trade_record_index on coin_of_interest_to_trade_record(trade_id)"


@ -24,7 +24,7 @@ minimum_send_attempts = 6
@dataclass
class ItemAndTransactionRecords(Generic[T]):
item: T
transaction_records: List["TransactionRecord"]
transaction_records: List[TransactionRecord]
@streamable


@ -14,7 +14,7 @@ from chia.util.condition_tools import conditions_dict_for_solution, pkm_pairs_fo
from chia.util.hash import std_hash
from chia.wallet.uncurried_puzzle import UncurriedPuzzle
CONDITIONS = dict((k, bytes(v)[0]) for k, v in ConditionOpcode.__members__.items()) # pylint: disable=E1101
CONDITIONS = {k: bytes(v)[0] for k, v in ConditionOpcode.__members__.items()} # pylint: disable=E1101
KFA = {v: k for k, v in CONDITIONS.items()}
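dict() over a generator of 2-tuples becomes a dict comprehension, mapping keys to values directly. Equivalent forms on a small stand-in mapping (values made up):

opcodes = {"CREATE_COIN": b"\x33", "RESERVE_FEE": b"\x34"}
old = dict((k, v[0]) for k, v in opcodes.items())
new = {k: v[0] for k, v in opcodes.items()}
assert old == new == {"CREATE_COIN": 0x33, "RESERVE_FEE": 0x34}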
@ -159,7 +159,7 @@ def debug_spend_bundle(spend_bundle, agg_sig_additional_data=DEFAULT_CONSTANTS.A
created = set(spend_bundle.additions())
spent = set(spend_bundle.removals())
zero_coin_set = set(coin.name() for coin in created if coin.amount == 0)
zero_coin_set = {coin.name() for coin in created if coin.amount == 0}
ephemeral = created.intersection(spent)
created.difference_update(ephemeral)
@ -189,9 +189,7 @@ def debug_spend_bundle(spend_bundle, agg_sig_additional_data=DEFAULT_CONSTANTS.A
as_hex = [f"0x{_.hex()}" for _ in announcement]
print(f" {as_hex} =>\n {hashed}")
eor_coin_announcements = sorted(
set(_[-1] for _ in created_coin_announcement_pairs) ^ set(asserted_coin_announcements)
)
eor_coin_announcements = sorted({_[-1] for _ in created_coin_announcement_pairs} ^ set(asserted_coin_announcements))
created_puzzle_announcement_pairs = [(_, std_hash(b"".join(_)).hex()) for _ in created_puzzle_announcements]
if created_puzzle_announcements:
@ -201,7 +199,7 @@ def debug_spend_bundle(spend_bundle, agg_sig_additional_data=DEFAULT_CONSTANTS.A
print(f" {as_hex} =>\n {hashed}")
eor_puzzle_announcements = sorted(
set(_[-1] for _ in created_puzzle_announcement_pairs) ^ set(asserted_puzzle_announcements)
{_[-1] for _ in created_puzzle_announcement_pairs} ^ set(asserted_puzzle_announcements)
)
print()


@ -435,7 +435,7 @@ class CRCAT:
inner_solution: Program,
# For optimization purposes the conditions may already have been run
conditions: Optional[Iterable[Program]] = None,
) -> Tuple[List[Announcement], CoinSpend, List["CRCAT"]]:
) -> Tuple[List[Announcement], CoinSpend, List[CRCAT]]:
"""
Spend a CR-CAT.


@ -82,7 +82,7 @@ class CRCATWallet(CATWallet):
tx_config: TXConfig,
fee: uint64 = uint64(0),
name: Optional[str] = None,
) -> "CATWallet": # pragma: no cover
) -> CATWallet: # pragma: no cover
raise NotImplementedError("create_new_cat_wallet is a legacy method and is not available on CR-CAT wallets")
@staticmethod
@ -597,7 +597,7 @@ class CRCATWallet(CATWallet):
vc.launcher_id,
tx_config,
puzzle_announcements=set(vc_announcements_to_make),
coin_announcements_to_consume=set((*expected_announcements, announcement)),
coin_announcements_to_consume={*expected_announcements, announcement},
)
else:
vc_txs = []
@ -793,7 +793,7 @@ class CRCATWallet(CATWallet):
fee,
uint64(0),
tx_config,
announcements_to_assert=set(Announcement(coin.name(), nonce) for coin in coins.union({vc.coin})),
announcements_to_assert={Announcement(coin.name(), nonce) for coin in coins.union({vc.coin})},
)
if chia_tx.spend_bundle is None:
raise RuntimeError("Did not get spendbundle for fee transaction") # pragma: no cover
@ -805,7 +805,7 @@ class CRCATWallet(CATWallet):
vc_txs: List[TransactionRecord] = await vc_wallet.generate_signed_transaction(
vc.launcher_id,
tx_config,
puzzle_announcements=set(crcat.expected_announcement() for crcat, _ in crcats_and_puzhashes),
puzzle_announcements={crcat.expected_announcement() for crcat, _ in crcats_and_puzhashes},
coin_announcements={nonce},
coin_announcements_to_consume=set(expected_announcements),
extra_conditions=extra_conditions,


@ -710,7 +710,7 @@ class VerifiedCredential(Streamable):
inner_solution: Program,
new_proof_hash: Optional[bytes32] = None,
new_proof_provider: Optional[bytes32] = None,
) -> Tuple[Optional[bytes32], CoinSpend, "VerifiedCredential"]:
) -> Tuple[Optional[bytes32], CoinSpend, VerifiedCredential]:
"""
Given an inner puzzle reveal and solution, spend the VC (potentially updating the proofs in the process).
Note that the inner puzzle is already expected to output the 'magic' condition (which can be created above).
@ -806,7 +806,7 @@ class VerifiedCredential(Streamable):
def _next_vc(
self, next_inner_puzzle_hash: bytes32, new_proof_hash: Optional[bytes32], next_amount: uint64
) -> "VerifiedCredential":
) -> VerifiedCredential:
"""
Private method that creates the next VC class instance.
"""


@ -121,28 +121,26 @@ class VCStore:
async with self.db_wrapper.writer_maybe_transaction() as conn:
await conn.execute(
(
"CREATE TABLE IF NOT EXISTS vc_records("
# VerifiedCredential.launcher_id
" launcher_id text PRIMARY KEY,"
# VerifiedCredential.coin
" coin_id text,"
" parent_coin_info text,"
" puzzle_hash text,"
" amount blob,"
# VerifiedCredential.singleton_lineage_proof
" singleton_lineage_proof blob,"
# VerifiedCredential.ownership_lineage_proof
" ownership_lineage_proof blob,"
# VerifiedCredential.inner_puzzle_hash
" inner_puzzle_hash text,"
# VerifiedCredential.proof_provider
" proof_provider text,"
# VerifiedCredential.proof_hash
" proof_hash text,"
# VCRecord.confirmed_height
" confirmed_height int)"
)
"CREATE TABLE IF NOT EXISTS vc_records("
# VerifiedCredential.launcher_id
" launcher_id text PRIMARY KEY,"
# VerifiedCredential.coin
" coin_id text,"
" parent_coin_info text,"
" puzzle_hash text,"
" amount blob,"
# VerifiedCredential.singleton_lineage_proof
" singleton_lineage_proof blob,"
# VerifiedCredential.ownership_lineage_proof
" ownership_lineage_proof blob,"
# VerifiedCredential.inner_puzzle_hash
" inner_puzzle_hash text,"
# VerifiedCredential.proof_provider
" proof_provider text,"
# VerifiedCredential.proof_hash
" proof_hash text,"
# VCRecord.confirmed_height
" confirmed_height int)"
)
await conn.execute("CREATE INDEX IF NOT EXISTS coin_id_index ON vc_records(coin_id)")


@ -279,7 +279,7 @@ class VCWallet:
fee, tx_config, Announcement(vc_record.vc.coin.name(), announcement_to_make)
)
if coin_announcements is None:
coin_announcements = set((announcement_to_make,))
coin_announcements = {announcement_to_make}
else:
coin_announcements.add(announcement_to_make) # pragma: no cover
else:


@ -69,19 +69,17 @@ class WalletCoinStore:
async with self.db_wrapper.writer_maybe_transaction() as conn:
await conn.execute(
(
"CREATE TABLE IF NOT EXISTS coin_record("
"coin_name text PRIMARY KEY,"
" confirmed_height bigint,"
" spent_height bigint,"
" spent int,"
" coinbase int,"
" puzzle_hash text,"
" coin_parent text,"
" amount blob,"
" wallet_type int,"
" wallet_id int)"
)
"CREATE TABLE IF NOT EXISTS coin_record("
"coin_name text PRIMARY KEY,"
" confirmed_height bigint,"
" spent_height bigint,"
" spent int,"
" coinbase int,"
" puzzle_hash text,"
" coin_parent text,"
" amount blob,"
" wallet_type int,"
" wallet_id int)"
)
# Useful for reorg lookups
@ -301,7 +299,7 @@ class WalletCoinStore:
"SELECT * FROM coin_record WHERE coin_type=? AND wallet_id=? AND spent_height=0",
(coin_type, wallet_id),
)
return set(self.coin_record_from_row(row) for row in rows)
return {self.coin_record_from_row(row) for row in rows}
async def get_all_unspent_coins(self, coin_type: CoinType = CoinType.NORMAL) -> Set[WalletCoinRecord]:
"""Returns set of CoinRecords that have not been spent yet for a wallet."""
@ -309,7 +307,7 @@ class WalletCoinStore:
rows = await conn.execute_fetchall(
"SELECT * FROM coin_record WHERE coin_type=? AND spent_height=0", (coin_type,)
)
return set(self.coin_record_from_row(row) for row in rows)
return {self.coin_record_from_row(row) for row in rows}
# Checks DB and DiffStores for CoinRecords with puzzle_hash and returns them
async def get_coin_records_by_puzzle_hash(self, puzzle_hash: bytes32) -> List[WalletCoinRecord]:


@ -46,18 +46,16 @@ class WalletNftStore:
self.db_wrapper = db_wrapper
async with self.db_wrapper.writer_maybe_transaction() as conn:
await conn.execute(
(
"CREATE TABLE IF NOT EXISTS users_nfts("
" nft_id text PRIMARY KEY,"
" nft_coin_id text,"
" wallet_id int,"
" did_id text,"
" coin text,"
" lineage_proof text,"
" mint_height bigint,"
" status text,"
" full_puzzle blob)"
)
"CREATE TABLE IF NOT EXISTS users_nfts("
" nft_id text PRIMARY KEY,"
" nft_coin_id text,"
" wallet_id int,"
" did_id text,"
" coin text,"
" lineage_proof text,"
" mint_height bigint,"
" status text,"
" full_puzzle blob)"
)
await conn.execute("CREATE INDEX IF NOT EXISTS nft_coin_id on users_nfts(nft_coin_id)")
await conn.execute("CREATE INDEX IF NOT EXISTS nft_wallet_id on users_nfts(wallet_id)")


@ -303,7 +303,7 @@ class WalletNode:
async with manage_connection(db_path) as conn:
self.log.info("Resetting wallet sync data...")
rows = list(await conn.execute_fetchall("SELECT name FROM sqlite_master WHERE type='table'"))
names = set([x[0] for x in rows])
names = {x[0] for x in rows}
names = names - set(required_tables)
for name in names:
for ignore_name in ignore_tables:


@ -36,17 +36,15 @@ class WalletPuzzleStore:
self.db_wrapper = db_wrapper
async with self.db_wrapper.writer_maybe_transaction() as conn:
await conn.execute(
(
"CREATE TABLE IF NOT EXISTS derivation_paths("
"derivation_index int,"
" pubkey text,"
" puzzle_hash text,"
" wallet_type int,"
" wallet_id int,"
" used tinyint,"
" hardened tinyint,"
" PRIMARY KEY(puzzle_hash, wallet_id))"
)
"CREATE TABLE IF NOT EXISTS derivation_paths("
"derivation_index int,"
" pubkey text,"
" puzzle_hash text,"
" wallet_type int,"
" wallet_id int,"
" used tinyint,"
" hardened tinyint,"
" PRIMARY KEY(puzzle_hash, wallet_id))"
)
await conn.execute(
"CREATE INDEX IF NOT EXISTS derivation_index_index on derivation_paths(derivation_index)"
@ -300,7 +298,7 @@ class WalletPuzzleStore:
rows = await conn.execute_fetchall(
"SELECT puzzle_hash FROM derivation_paths WHERE wallet_id=?", (wallet_id,)
)
return set(bytes32.fromhex(row[0]) for row in rows)
return {bytes32.fromhex(row[0]) for row in rows}
async def get_last_derivation_path(self) -> Optional[uint32]:
"""
@ -373,7 +371,7 @@ class WalletPuzzleStore:
cursor = await conn.execute("DELETE FROM derivation_paths WHERE wallet_id=?;", (wallet_id,))
await cursor.close()
# Clear caches
puzzle_hashes = set(bytes32.fromhex(row[0]) for row in rows)
puzzle_hashes = {bytes32.fromhex(row[0]) for row in rows}
for puzzle_hash in puzzle_hashes:
try:
self.wallet_identifier_cache.remove(puzzle_hash)


@ -17,7 +17,7 @@ class WalletRetryStore:
db_wrapper: DBWrapper2
@classmethod
async def create(cls, db_wrapper: DBWrapper2) -> "WalletRetryStore":
async def create(cls, db_wrapper: DBWrapper2) -> WalletRetryStore:
self = cls()
self.db_wrapper = db_wrapper
async with self.db_wrapper.writer_maybe_transaction() as conn:


@ -34,19 +34,17 @@ class WalletSingletonStore:
async with self.db_wrapper.writer_maybe_transaction() as conn:
await conn.execute(
(
"CREATE TABLE IF NOT EXISTS singletons("
"coin_id blob PRIMARY KEY,"
" coin text,"
" singleton_id blob,"
" wallet_id int,"
" parent_coin_spend blob,"
" inner_puzzle_hash blob,"
" pending tinyint,"
" removed_height int,"
" lineage_proof blob,"
" custom_data blob)"
)
"CREATE TABLE IF NOT EXISTS singletons("
"coin_id blob PRIMARY KEY,"
" coin text,"
" singleton_id blob,"
" wallet_id int,"
" parent_coin_spend blob,"
" inner_puzzle_hash blob,"
" pending tinyint,"
" removed_height int,"
" lineage_proof blob,"
" custom_data blob)"
)
await conn.execute("CREATE INDEX IF NOT EXISTS removed_height_index on singletons(removed_height)")


@ -2222,7 +2222,7 @@ class WalletStateManager:
and error not in (Err.INVALID_FEE_LOW_FEE, Err.INVALID_FEE_TOO_CLOSE_TO_ZERO)
):
coins_removed = tx.spend_bundle.removals()
trade_coins_removed = set([])
trade_coins_removed = set()
trades = []
for removed_coin in coins_removed:
trade = await self.trade_manager.get_trade_by_coin(removed_coin)
@ -2421,9 +2421,7 @@ class WalletStateManager:
# but the coin_cache keeps all wallet_ids for each puzzle hash
for puzzle_hash in puzzle_hashes:
if puzzle_hash in self.interested_coin_cache:
wallet_ids_to_add = list(
set([w for w in wallet_ids if w not in self.interested_coin_cache[puzzle_hash]])
)
wallet_ids_to_add = list({w for w in wallet_ids if w not in self.interested_coin_cache[puzzle_hash]})
self.interested_coin_cache[puzzle_hash].extend(wallet_ids_to_add)
else:
self.interested_coin_cache[puzzle_hash] = list(set(wallet_ids))
@ -2437,7 +2435,7 @@ class WalletStateManager:
for coin_id in coin_ids:
if coin_id in self.interested_coin_cache:
# prevent repeated wallet_ids from appearing in the coin cache
wallet_ids_to_add = list(set([w for w in wallet_ids if w not in self.interested_coin_cache[coin_id]]))
wallet_ids_to_add = list({w for w in wallet_ids if w not in self.interested_coin_cache[coin_id]})
self.interested_coin_cache[coin_id].extend(wallet_ids_to_add)
else:
self.interested_coin_cache[coin_id] = list(set(wallet_ids))


@ -46,21 +46,19 @@ class WalletTransactionStore:
self.db_wrapper = db_wrapper
async with self.db_wrapper.writer_maybe_transaction() as conn:
await conn.execute(
(
"CREATE TABLE IF NOT EXISTS transaction_record("
" transaction_record blob,"
" bundle_id text PRIMARY KEY," # NOTE: bundle_id is being stored as bytes, not hex
" confirmed_at_height bigint,"
" created_at_time bigint,"
" to_puzzle_hash text,"
" amount blob,"
" fee_amount blob,"
" confirmed int,"
" sent int,"
" wallet_id bigint,"
" trade_id text,"
" type int)"
)
"CREATE TABLE IF NOT EXISTS transaction_record("
" transaction_record blob,"
" bundle_id text PRIMARY KEY," # NOTE: bundle_id is being stored as bytes, not hex
" confirmed_at_height bigint,"
" created_at_time bigint,"
" to_puzzle_hash text,"
" amount blob,"
" fee_amount blob,"
" confirmed int,"
" sent int,"
" wallet_id bigint,"
" trade_id text,"
" type int)"
)
# Useful for reorg lookups


@ -23,13 +23,11 @@ class WalletUserStore:
self.db_wrapper = db_wrapper
async with self.db_wrapper.writer_maybe_transaction() as conn:
await conn.execute(
(
"CREATE TABLE IF NOT EXISTS users_wallets("
"id INTEGER PRIMARY KEY AUTOINCREMENT,"
" name text,"
" wallet_type int,"
" data text)"
)
"CREATE TABLE IF NOT EXISTS users_wallets("
"id INTEGER PRIMARY KEY AUTOINCREMENT,"
" name text,"
" wallet_type int,"
" data text)"
)
await conn.execute("CREATE INDEX IF NOT EXISTS name on users_wallets(name)")


@ -39,12 +39,12 @@ def make_semver(version_str: str) -> str:
local = v.local
version = "{0}.{1}.{2}".format(major, minor, patch)
version = f"{major}.{minor}.{patch}"
if prerelease:
version += "-{0}".format(".".join(prerelease))
version += "-{}".format(".".join(prerelease))
if local:
version += "+{0}".format(local)
version += f"+{local}"
return version


@ -56,6 +56,7 @@ dev_dependencies = [
"pytest-mock==3.11.1",
"pytest-monitor==1.6.6; sys_platform == 'linux'",
"pytest-xdist==3.3.1",
"pyupgrade==3.15.0",
"twine==4.0.2",
"isort==5.12.0",
"flake8==6.1.0",


@ -14,7 +14,7 @@ def check_create(sql_type: str, cwd: str, exemptions: Set[Tuple[str, str]] = set
# such as with worktrees, at least in particular uses of them. I think that we could switch to letting
# pre-commit provide the file list instead of reaching out to git to build that list ourselves. Until we
# make time to handle that, this is an alternative to alleviate the issue.
exemptions = set((cwd + "/" + file, name) for file, name in exemptions)
exemptions = {(cwd + "/" + file, name) for file, name in exemptions}
lines = check_output(["git", "grep", f"CREATE {sql_type}"]).decode("ascii").split("\n")
ret = 0
@ -54,24 +54,20 @@ ret += check_create("INDEX", "chia/wallet")
ret += check_create(
"INDEX",
"chia/full_node",
set(
[
("block_store.py", "is_fully_compactified"),
("block_store.py", "height"),
]
),
{
("block_store.py", "is_fully_compactified"),
("block_store.py", "height"),
},
)
ret += check_create("TABLE", "chia/wallet")
ret += check_create(
"TABLE",
"chia/full_node",
set(
[
("block_store.py", "sub_epoch_segments_v3"),
("block_store.py", "full_blocks"),
("coin_store.py", "coin_record"),
("hint_store.py", "hints"),
]
),
{
("block_store.py", "sub_epoch_segments_v3"),
("block_store.py", "full_blocks"),
("coin_store.py", "coin_record"),
("hint_store.py", "hints"),
},
)
sys.exit(ret)


@ -38,7 +38,7 @@ from tests.cmds.wallet.test_consts import (
test_offer_file_path: Path = Path("tests") / "cmds" / "wallet" / "test_offer.toffer"
test_offer_file_name: str = str(test_offer_file_path)
test_offer_file_bech32: str = open(test_offer_file_name, "r").read()
test_offer_file_bech32: str = open(test_offer_file_name).read()
test_offer_id: str = "0xdfb7e8643376820ec995b0bcdb3fc1f764c16b814df5e074631263fcf1e00839"
test_offer_id_bytes: bytes32 = bytes32.from_hexstr(test_offer_id)


@ -622,20 +622,16 @@ class TestKeysCommands:
assert result.exit_code == 0
assert (
result.output.find(
(
"Public Key: 92f15caed8a5495faa7ec25a8af3f223438ef73c974b0aa81e788057b1154870f149739b2c2d0e"
"736234baf9386f7f83"
)
"Public Key: 92f15caed8a5495faa7ec25a8af3f223438ef73c974b0aa81e788057b1154870f149739b2c2d0e"
"736234baf9386f7f83"
)
!= -1
)
assert (
result.output.find(
(
"Signature: a82e7d1b87d8c25a6ccac603194011d73f71fc76c17c1ce4ee53484f81874f116b1cb9dd991bcf9"
"aa41c10beaab54a830fc6f7e5e25a9144f73e38a6fb852a87e36d80f575a6f84359144e6e9499ba9208912de55"
"a1f7514cd8cfa166ae48e64"
)
"Signature: a82e7d1b87d8c25a6ccac603194011d73f71fc76c17c1ce4ee53484f81874f116b1cb9dd991bcf9"
"aa41c10beaab54a830fc6f7e5e25a9144f73e38a6fb852a87e36d80f575a6f84359144e6e9499ba9208912de55"
"a1f7514cd8cfa166ae48e64"
)
!= -1
)
@ -657,19 +653,15 @@ class TestKeysCommands:
assert result.exit_code == 0
assert (
result.output.find(
(
"Public Key: b5e383b8192dacff662455bdb3bbfc433f678f0d7ff7f118149e0d2ad39aa6d59ac4cb3662acf8"
"e8307e66069d3a13cc"
)
"Public Key: b5e383b8192dacff662455bdb3bbfc433f678f0d7ff7f118149e0d2ad39aa6d59ac4cb3662acf8"
"e8307e66069d3a13cc"
)
) != -1
assert (
result.output.find(
(
"Signature: b5b3bc1417f67498748018a7ad2c95acfc5ae2dcd0d9dd0f3abfc7e3f047f2e6cf6c3e775b6caff"
"a3e0baaadc2fe705a100cd4c961d6ff3c575c5c33683eb7b1e2dbbcaf37318227ae40ef8ccf57879a7818fad8f"
"dc573d55c908be2611b8077"
)
"Signature: b5b3bc1417f67498748018a7ad2c95acfc5ae2dcd0d9dd0f3abfc7e3f047f2e6cf6c3e775b6caff"
"a3e0baaadc2fe705a100cd4c961d6ff3c575c5c33683eb7b1e2dbbcaf37318227ae40ef8ccf57879a7818fad8f"
"dc573d55c908be2611b8077"
)
) != -1
@ -697,20 +689,16 @@ class TestKeysCommands:
assert result.exit_code == 0
assert (
result.output.find(
(
"Public Key: "
"92f15caed8a5495faa7ec25a8af3f223438ef73c974b0aa81e788057b1154870f149739b2c2d0e736234baf9386f7f83"
)
"Public Key: "
"92f15caed8a5495faa7ec25a8af3f223438ef73c974b0aa81e788057b1154870f149739b2c2d0e736234baf9386f7f83"
)
!= -1
)
assert (
result.output.find(
(
"Signature: a82e7d1b87d8c25a6ccac603194011d73f71fc76c17c1ce4ee53484f81874f116b1cb9dd991bcf"
"9aa41c10beaab54a830fc6f7e5e25a9144f73e38a6fb852a87e36d80f575a6f84359144e6e9499ba9208912de"
"55a1f7514cd8cfa166ae48e64"
)
"Signature: a82e7d1b87d8c25a6ccac603194011d73f71fc76c17c1ce4ee53484f81874f116b1cb9dd991bcf"
"9aa41c10beaab54a830fc6f7e5e25a9144f73e38a6fb852a87e36d80f575a6f84359144e6e9499ba9208912de"
"55a1f7514cd8cfa166ae48e64"
)
!= -1
)
@ -782,19 +770,15 @@ class TestKeysCommands:
assert result.exit_code == 0
assert (
result.output.find(
(
"Found public key: a4601f992f24047097a30854ef656382911575694439108723698"
"972941e402d737c13df76fdf43597f7b3c2fa9ed27a (HD path: m/12381/8444/2/9)"
)
"Found public key: a4601f992f24047097a30854ef656382911575694439108723698"
"972941e402d737c13df76fdf43597f7b3c2fa9ed27a (HD path: m/12381/8444/2/9)"
)
!= -1
)
assert (
result.output.find(
(
"Found private key: "
"028e33fa3f8caa3102c028f3bff6b6680e528d9a0c543c479ef0b0339060ef36 (HD path: m/12381/8444/2/9)"
)
"Found private key: "
"028e33fa3f8caa3102c028f3bff6b6680e528d9a0c543c479ef0b0339060ef36 (HD path: m/12381/8444/2/9)"
)
!= -1
)
@ -842,10 +826,8 @@ class TestKeysCommands:
assert result.exit_code == 0
assert (
result.output.find(
(
"Found wallet address: "
"xch1mnr0ygu7lvmk3nfgzmncfk39fwu0dv933yrcv97nd6pmrt7fzmhs8taffd (HD path: m/12381/8444/2/30)"
)
"Found wallet address: "
"xch1mnr0ygu7lvmk3nfgzmncfk39fwu0dv933yrcv97nd6pmrt7fzmhs8taffd (HD path: m/12381/8444/2/30)"
)
!= -1
)
@ -895,10 +877,8 @@ class TestKeysCommands:
assert result.exit_code == 0
assert (
result.output.find(
(
"Found wallet address: "
"txch1mnr0ygu7lvmk3nfgzmncfk39fwu0dv933yrcv97nd6pmrt7fzmhs2v6lg7 (HD path: m/12381/8444/2/30)"
)
"Found wallet address: "
"txch1mnr0ygu7lvmk3nfgzmncfk39fwu0dv933yrcv97nd6pmrt7fzmhs2v6lg7 (HD path: m/12381/8444/2/30)"
)
!= -1
)
@ -990,10 +970,8 @@ class TestKeysCommands:
assert result.exit_code == 0
assert (
result.output.find(
(
"Found public key: 80dc3a2ea450eb09e24debe22e1b5934911ba530792ef0be361bad"
"ebb168780bd328ff8d4655e5dd573d5bef4a340344 (HD path: m/12381n/8444n/2/35)"
)
"Found public key: 80dc3a2ea450eb09e24debe22e1b5934911ba530792ef0be361bad"
"ebb168780bd328ff8d4655e5dd573d5bef4a340344 (HD path: m/12381n/8444n/2/35)"
)
!= -1
)
@ -1042,19 +1020,15 @@ class TestKeysCommands:
assert result.exit_code == 0
assert (
result.output.find(
(
"Wallet address 50 (m/12381n/8444n/2n/50n): "
"xch1jp2u7an0mn9hdlw2x05nmje49gwgzmqyvh0qmh6008yksetuvkfs6wrfdq"
)
"Wallet address 50 (m/12381n/8444n/2n/50n): "
"xch1jp2u7an0mn9hdlw2x05nmje49gwgzmqyvh0qmh6008yksetuvkfs6wrfdq"
)
!= -1
)
assert (
result.output.find(
(
"Wallet address 51 (m/12381n/8444n/2n/51n): "
"xch1006n6l3x5e8exar8mlj004znjl5pq0tq73h76kz0yergswnjzn8sumvfmt"
)
"Wallet address 51 (m/12381n/8444n/2n/51n): "
"xch1006n6l3x5e8exar8mlj004znjl5pq0tq73h76kz0yergswnjzn8sumvfmt"
)
!= -1
)
@ -1105,19 +1079,15 @@ class TestKeysCommands:
assert result.exit_code == 0
assert (
result.output.find(
(
"Wallet address 50 (m/12381n/8444n/2n/50n): "
"txch1jp2u7an0mn9hdlw2x05nmje49gwgzmqyvh0qmh6008yksetuvkfshfylvn"
)
"Wallet address 50 (m/12381n/8444n/2n/50n): "
"txch1jp2u7an0mn9hdlw2x05nmje49gwgzmqyvh0qmh6008yksetuvkfshfylvn"
)
!= -1
)
assert (
result.output.find(
(
"Wallet address 51 (m/12381n/8444n/2n/51n): "
"txch1006n6l3x5e8exar8mlj004znjl5pq0tq73h76kz0yergswnjzn8s3utl6c"
)
"Wallet address 51 (m/12381n/8444n/2n/51n): "
"txch1006n6l3x5e8exar8mlj004znjl5pq0tq73h76kz0yergswnjzn8s3utl6c"
)
!= -1
)
@ -1168,37 +1138,29 @@ class TestKeysCommands:
assert result.exit_code == 0
assert (
result.output.find(
(
"Observer public key 30 (m/12381n/8444n/2/3/4/30): "
"979a1fa0bfc140488d4a9edcfbf244a398fe922618a981cc0fffe5445d811f2237ff8234c0520b28b3096c8269f2731e"
)
"Observer public key 30 (m/12381n/8444n/2/3/4/30): "
"979a1fa0bfc140488d4a9edcfbf244a398fe922618a981cc0fffe5445d811f2237ff8234c0520b28b3096c8269f2731e"
)
!= -1
)
assert (
result.output.find(
(
"Observer private key 30 (m/12381n/8444n/2/3/4/30): "
"5dd22db24fe28805b101104c543f5bec3808328ad67de3d3dcd9efd6faab13aa"
)
"Observer private key 30 (m/12381n/8444n/2/3/4/30): "
"5dd22db24fe28805b101104c543f5bec3808328ad67de3d3dcd9efd6faab13aa"
)
!= -1
)
assert (
result.output.find(
(
"Observer public key 31 (m/12381n/8444n/2/3/4/31): "
"ab5885df340a27b5eb3f1c4b8c32889f529ad5ecc4c9718247e36756de2e143c604af9956941a72239124e6fb352782e"
)
"Observer public key 31 (m/12381n/8444n/2/3/4/31): "
"ab5885df340a27b5eb3f1c4b8c32889f529ad5ecc4c9718247e36756de2e143c604af9956941a72239124e6fb352782e"
)
!= -1
)
assert (
result.output.find(
(
"Observer private key 31 (m/12381n/8444n/2/3/4/31): "
"113610b39c2151fd68d7f795d5dd596b94889a3cf7825a56da5c6d2c7e5141a1"
)
"Observer private key 31 (m/12381n/8444n/2/3/4/31): "
"113610b39c2151fd68d7f795d5dd596b94889a3cf7825a56da5c6d2c7e5141a1"
)
!= -1
)
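Note on the hunks above: every change in this file is pyupgrade's extraneous-parentheses rewrite. The removed parentheses only grouped adjacent string literals passed as the sole argument to find(), and adjacent literals are already joined into one string at compile time, so behavior is unchanged. A minimal runnable sketch (the hex payloads below are shortened placeholders, not the real keys):

# Adjacent string literals merge during compilation, with or
# without the grouping parentheses that pyupgrade removed.
with_parens = ("Public Key: 92f15c" "...f7f83")
without_parens = "Public Key: 92f15c" "...f7f83"
assert with_parens == without_parens == "Public Key: 92f15c...f7f83"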

View File

@ -139,5 +139,5 @@ async def test_plugin_requests_use_custom_headers(
await data_layer.get_uploaders(tree_id=bytes32([0] * 32))
await data_layer.check_plugins()
header_values = set(request.headers.get(header_key) for request in recording_web_server.requests)
header_values = {request.headers.get(header_key) for request in recording_web_server.requests}
assert header_values == {header_value}
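Note: this is pyupgrade's comprehension rewrite. Passing a generator expression to set() builds the same set as a set comprehension, but the comprehension skips the intermediate generator and the constructor call. An illustrative sketch with stand-in data (not the test's actual requests):

headers = ["x-chia", "X-Chia", "x-other"]
before = set(h.lower() for h in headers)
after = {h.lower() for h in headers}
assert before == after == {"x-chia", "x-other"}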

View File

@ -124,7 +124,7 @@ async def test_get_trees(raw_data_store: DataStore) -> None:
expected_tree_ids = set()
for n in range(10):
tree_id = bytes32((b"\0" * 31 + bytes([n])))
tree_id = bytes32(b"\0" * 31 + bytes([n]))
await raw_data_store.create_tree(tree_id=tree_id)
expected_tree_ids.add(tree_id)
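Note: the doubled parentheses around the bytes expression were pure grouping noise; the call's own parentheses already delimit the single argument. A quick sketch with a stand-in function (bytes32 itself is chia-specific):

def f(x: bytes) -> bytes:
    return x

payload = b"\0" * 31 + bytes([3])
assert f((payload)) == f(payload)  # identical call either way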
@ -1130,9 +1130,9 @@ async def test_kv_diff_2(data_store: DataStore, tree_id: bytes32) -> None:
empty_hash = bytes32([0] * 32)
invalid_hash = bytes32([0] * 31 + [1])
diff_1 = await data_store.get_kv_diff(tree_id, empty_hash, insert_result.node_hash)
assert diff_1 == set([DiffData(OperationType.INSERT, b"000", b"000")])
assert diff_1 == {DiffData(OperationType.INSERT, b"000", b"000")}
diff_2 = await data_store.get_kv_diff(tree_id, insert_result.node_hash, empty_hash)
assert diff_2 == set([DiffData(OperationType.DELETE, b"000", b"000")])
assert diff_2 == {DiffData(OperationType.DELETE, b"000", b"000")}
diff_3 = await data_store.get_kv_diff(tree_id, invalid_hash, insert_result.node_hash)
assert diff_3 == set()
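Note: set([...]) with a list literal allocates a throwaway list and then copies it into a set; the set literal pyupgrade substitutes builds the set directly. A sketch with tuple stand-ins for the DiffData entries:

before = set([("INSERT", b"000", b"000")])
after = {("INSERT", b"000", b"000")}
assert before == after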
@ -1204,7 +1204,7 @@ async def test_server_selection(data_store: DataStore, tree_id: bytes32) -> None
Subscription(tree_id, [ServerInfo(f"http://127.0.0.1/{port}", 0, 0) for port in range(8000, 8010)])
)
free_servers = set(f"http://127.0.0.1/{port}" for port in range(8000, 8010))
free_servers = {f"http://127.0.0.1/{port}" for port in range(8000, 8010)}
tried_servers = 0
random = Random()
random.seed(100, version=2)
@ -1230,7 +1230,7 @@ async def test_server_selection(data_store: DataStore, tree_id: bytes32) -> None
random.shuffle(servers_info)
assert servers_info != []
selected_servers.add(servers_info[0].url)
assert selected_servers == set(f"http://127.0.0.1/{port}" for port in range(8000, 8010))
assert selected_servers == {f"http://127.0.0.1/{port}" for port in range(8000, 8010)}
for _ in range(100):
servers_info = await data_store.get_available_servers_for_store(tree_id=tree_id, timestamp=current_timestamp)

View File

@ -10,7 +10,7 @@ import pytest
# wallet nodes fixture below.
# https://github.com/pytest-dev/pytest-asyncio/blob/v0.18.1/pytest_asyncio/plugin.py#L479-L484
@pytest.fixture(scope="module")
def event_loop(request: "pytest.FixtureRequest") -> Iterator[asyncio.AbstractEventLoop]:
def event_loop(request: pytest.FixtureRequest) -> Iterator[asyncio.AbstractEventLoop]:
"""Create an instance of the default event loop for each test case."""
loop = asyncio.get_event_loop_policy().new_event_loop()
yield loop
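Note: the quotes around the annotation are dropped here. pyupgrade dequotes annotations when the module uses from __future__ import annotations (the import block is not visible in this hunk, so that is inferred); either way the bare name is safe because pytest is imported at module scope, as the @pytest.fixture decorator above shows. Sketch:

from __future__ import annotations  # makes annotations lazily evaluated

import pytest

def event_loop(request: pytest.FixtureRequest) -> None:  # no quotes needed
    ...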

View File

@ -824,7 +824,7 @@ coins = make_test_coins()
],
)
def test_can_replace(existing_items: List[MempoolItem], new_item: MempoolItem, expected: bool) -> None:
removals = set(c.name() for c in new_item.spend_bundle.removals())
removals = {c.name() for c in new_item.spend_bundle.removals()}
assert can_replace(existing_items, removals, new_item) == expected
@ -1406,10 +1406,10 @@ async def test_identical_spend_aggregation_e2e(simulator_and_wallet: SimulatorsA
full_node_api: FullNodeSimulator,
spent_coin_id: bytes32,
) -> Set[bytes32]:
return set(
return {
i.spend_bundle_name
for i in full_node_api.full_node.mempool_manager.mempool.get_items_by_coin_id(spent_coin_id)
)
}
async def send_to_mempool(
full_node: FullNodeSimulator, spend_bundle: SpendBundle, *, expecting_conflict: bool = False

View File

@ -67,9 +67,7 @@ async def main() -> None:
logger = create_logger(file=file)
async def f() -> None:
await asyncio.gather(
*[tcp_echo_client(task_counter="{}".format(i), logger=logger) for i in range(0, NUM_CLIENTS)]
)
await asyncio.gather(*[tcp_echo_client(task_counter=f"{i}", logger=logger) for i in range(0, NUM_CLIENTS)])
task = asyncio.create_task(f())
try:
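Note: positional "{}".format(...) calls become f-strings throughout this commit; the f-string formats the value inline without the method-call indirection and reads closer to the output it produces. Minimal check:

i = 7
assert "{}".format(i) == f"{i}" == "7"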

View File

@ -28,7 +28,7 @@ class EchoServer(asyncio.Protocol):
def connection_made(self, transport: asyncio.BaseTransport) -> None:
peername = transport.get_extra_info("peername")
self.logger.info("connection from {}".format(peername))
self.logger.info(f"connection from {peername}")
self.transport = transport
def data_received(self, data: bytes) -> None:
@ -95,7 +95,7 @@ async def async_main(
[server_socket] = server.sockets
# TODO: review if this is general enough, such as for ipv6
port_holder.append(server_socket.getsockname()[1])
logger.info("serving on {}".format(server.sockets[0].getsockname()))
logger.info(f"serving on {server.sockets[0].getsockname()}")
try:
try:

View File

@ -37,7 +37,7 @@ class TestAPI:
# API call from FullNodeAPI
@api_request()
async def request_transaction(self, request: RequestTransaction) -> None:
raise ApiError(Err.NO_TRANSACTIONS_WHILE_SYNCING, f"Some error message: {request.transaction_id}", bytes(b"ab"))
raise ApiError(Err.NO_TRANSACTIONS_WHILE_SYNCING, f"Some error message: {request.transaction_id}", b"ab")
@pytest.mark.asyncio
@ -137,7 +137,7 @@ async def test_error_response(
error = ApiError(Err.NO_TRANSACTIONS_WHILE_SYNCING, error_message)
assert f"ApiError: {error} from {wallet_connection.peer_node_id}, {wallet_connection.peer_info}" in caplog.text
if test_version >= error_response_version:
assert response == Error(int16(Err.NO_TRANSACTIONS_WHILE_SYNCING.value), error_message, bytes(b"ab"))
assert response == Error(int16(Err.NO_TRANSACTIONS_WHILE_SYNCING.value), error_message, b"ab")
assert "Request timeout:" not in caplog.text
else:
assert response is None
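Note: wrapping a bytes literal in the bytes() constructor is a no-op, so pyupgrade keeps just the literal:

assert bytes(b"ab") == b"ab"  # the constructor call added nothing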

View File

@ -63,7 +63,7 @@ async def test1(two_nodes_sim_and_wallets_services, self_hostname, consensus_mod
blocks = bt.get_consecutive_blocks(num_blocks, block_list_input=blocks, guarantee_transaction_block=True)
assert len(await client.get_unfinished_block_headers()) == 0
assert len((await client.get_block_records(0, 100))) == 0
assert len(await client.get_block_records(0, 100)) == 0
for block in blocks:
if is_overflow_block(bt.constants, block.reward_chain_block.signage_point_index):
finished_ss = block.finished_sub_slots[:-1]
@ -94,7 +94,7 @@ async def test1(two_nodes_sim_and_wallets_services, self_hostname, consensus_mod
assert (await client.get_block_record_by_height(2)).header_hash == blocks[2].header_hash
assert len((await client.get_block_records(0, 100))) == num_blocks * 2
assert len(await client.get_block_records(0, 100)) == num_blocks * 2
assert (await client.get_block_record_by_height(100)) is None
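Note: await is a primary expression, so the inner parentheses in len((await ...)) were redundant; len(await ...) parses identically. A runnable sketch:

import asyncio

async def fetch() -> list:
    return [1, 2, 3]

async def main() -> None:
    assert len(await fetch()) == 3  # no extra parens required

asyncio.run(main())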

View File

@ -171,7 +171,7 @@ class TestConfig:
expected_content: str = initial_config_file("config.yaml")
assert len(expected_content) > 0
with open(config_file_path, "r") as f:
with open(config_file_path) as f:
actual_content: str = f.read()
# Expect: config.yaml contents are seeded with initial contents
assert actual_content == expected_content
@ -197,7 +197,7 @@ class TestConfig:
expected_content: str = initial_config_file("config.yaml")
assert len(expected_content) > 0
with open(config_file_path, "r") as f:
with open(config_file_path) as f:
actual_content: str = f.read()
# Expect: config.yaml contents are overwritten with initial contents
assert actual_content == expected_content
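Note: "r" (text read) is open()'s default mode, so the explicit mode argument was dead weight. The rewritten call behaves identically, assuming the file exists as in the test:

with open(config_file_path) as f:  # equivalent to open(config_file_path, "r")
    actual_content = f.read()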

View File

@ -206,14 +206,12 @@ class TestKeyringWrapper:
assert KeyringWrapper.get_shared_instance().get_passphrase("service-abc", "user-xyz") is None
# When: setting a passphrase
KeyringWrapper.get_shared_instance().set_passphrase(
"service-abc", "user-xyz", "super secret passphrase".encode().hex()
)
KeyringWrapper.get_shared_instance().set_passphrase("service-abc", "user-xyz", b"super secret passphrase".hex())
# Expect: passphrase lookup should succeed
assert (
KeyringWrapper.get_shared_instance().get_passphrase("service-abc", "user-xyz")
== "super secret passphrase".encode().hex()
== b"super secret passphrase".hex()
)
# Expect: non-existent passphrase lookup should fail
@ -227,25 +225,21 @@ class TestKeyringWrapper:
Overwriting a previously-set passphrase should work
"""
# When: initially setting the passphrase
KeyringWrapper.get_shared_instance().set_passphrase(
"service-xyz", "user-123", "initial passphrase".encode().hex()
)
KeyringWrapper.get_shared_instance().set_passphrase("service-xyz", "user-123", b"initial passphrase".hex())
# Expect: passphrase lookup should succeed
assert (
KeyringWrapper.get_shared_instance().get_passphrase("service-xyz", "user-123")
== "initial passphrase".encode().hex()
== b"initial passphrase".hex()
)
# When: updating the same passphrase
KeyringWrapper.get_shared_instance().set_passphrase(
"service-xyz", "user-123", "updated passphrase".encode().hex()
)
KeyringWrapper.get_shared_instance().set_passphrase("service-xyz", "user-123", b"updated passphrase".hex())
# Expect: the updated passphrase should be retrieved
assert (
KeyringWrapper.get_shared_instance().get_passphrase("service-xyz", "user-123")
== "updated passphrase".encode().hex()
== b"updated passphrase".hex()
)
# When: using a new empty keyring
@ -257,12 +251,11 @@ class TestKeyringWrapper:
KeyringWrapper.get_shared_instance().delete_passphrase("some service", "some user")
# When: setting a passphrase
KeyringWrapper.get_shared_instance().set_passphrase("some service", "some user", "500p3r 53cr37".encode().hex())
KeyringWrapper.get_shared_instance().set_passphrase("some service", "some user", b"500p3r 53cr37".hex())
# Expect: passphrase retrieval should succeed
assert (
KeyringWrapper.get_shared_instance().get_passphrase("some service", "some user")
== "500p3r 53cr37".encode().hex()
KeyringWrapper.get_shared_instance().get_passphrase("some service", "some user") == b"500p3r 53cr37".hex()
)
# When: deleting the passphrase
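Note: for ASCII-only text like these passphrases, "s".encode() (UTF-8 by default) yields exactly the bytes of the literal b"s", so the encode call folds into a bytes literal. The round-trip is byte-for-byte only because the content is ASCII:

assert "super secret passphrase".encode().hex() == b"super secret passphrase".hex()
assert "500p3r 53cr37".encode().hex() == b"500p3r 53cr37".hex()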

View File

@ -78,8 +78,8 @@ class TestData:
removed_paths: List[Path] = [p.prover.get_filename() for p in removed] if removed is not None else []
invalid_dict: Dict[Path, int] = {p.prover.get_filename(): 0 for p in self.invalid}
keys_missing_set: Set[Path] = set([p.prover.get_filename() for p in self.keys_missing])
duplicates_set: Set[str] = set([p.prover.get_filename() for p in self.duplicates])
keys_missing_set: Set[Path] = {p.prover.get_filename() for p in self.keys_missing}
duplicates_set: Set[str] = {p.prover.get_filename() for p in self.duplicates}
# Inject invalid plots into `PlotManager` of the harvester so that the callback calls below can use them
# to sync them to the farmer.
@ -427,7 +427,7 @@ async def test_sync_reset_cases(
started_sync_id: uint64 = uint64(0)
plot_manager.failed_to_open_filenames = {p.prover.get_filename(): 0 for p in test_data.invalid}
plot_manager.no_key_filenames = set([p.prover.get_filename() for p in test_data.keys_missing])
plot_manager.no_key_filenames = {p.prover.get_filename() for p in test_data.keys_missing}
async def wait_for_reset() -> bool:
assert started_sync_id != 0

View File

@ -26,9 +26,9 @@ def float_to_str(f):
zero_padding = "0" * (abs(int(exp)) - 1) # minus 1 for decimal point in the sci notation
sign = "-" if f < 0 else ""
if exp > 0:
float_string = "{}{}{}.0".format(sign, digits, zero_padding)
float_string = f"{sign}{digits}{zero_padding}.0"
else:
float_string = "{}0.{}{}".format(sign, zero_padding, digits)
float_string = f"{sign}0.{zero_padding}{digits}"
return float_string

View File

@ -44,7 +44,7 @@ from chia.util.ints import int16, uint8, uint16, uint32, uint64, uint128
# SHARED PROTOCOL
error_without_data = Error(int16(Err.UNKNOWN.value), "Unknown", None)
error_with_data = Error(int16(Err.UNKNOWN.value), "Unknown", bytes(b"extra data"))
error_with_data = Error(int16(Err.UNKNOWN.value), "Unknown", b"extra data")
### FARMER PROTOCOL

View File

@ -290,7 +290,7 @@ class TestDLWallet:
[previous_record.lineage_proof.amount],
[previous_record.inner_puzzle_hash],
DEFAULT_TX_CONFIG,
coins=set([txs[0].spend_bundle.removals()[0]]),
coins={txs[0].spend_bundle.removals()[0]},
fee=uint64(1999999999999),
)

View File

@ -246,11 +246,11 @@ async def test_nft_mint_from_did_rpc(
metadata_list = [
{
"hash": bytes32.random(seeded_random).hex(),
"uris": ["https://data.com/{}".format(i)],
"uris": [f"https://data.com/{i}"],
"meta_hash": bytes32.random(seeded_random).hex(),
"meta_uris": ["https://meatadata.com/{}".format(i)],
"meta_uris": [f"https://meatadata.com/{i}"],
"license_hash": bytes32.random(seeded_random).hex(),
"license_uris": ["https://license.com/{}".format(i)],
"license_uris": [f"https://license.com/{i}"],
"edition_number": i + 1,
"edition_total": n,
}
@ -274,7 +274,7 @@ async def test_nft_mint_from_did_rpc(
)[0]
did_lineage_parent = None
spends = []
nft_ids = set([])
nft_ids = set()
for i in range(0, n, chunk):
await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_maker, timeout=20)
resp: Dict[str, Any] = await client.nft_mint_bulk(
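Note the empty case above: Python has no empty-set literal ({} is an empty dict), so set([]) becomes set() rather than a brace literal:

nft_ids = set()  # before: set([])
assert nft_ids == set([]) and {} != set()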
@ -439,11 +439,11 @@ async def test_nft_mint_from_did_rpc_no_royalties(
metadata_list = [
{
"hash": bytes32.random(seeded_random).hex(),
"uris": ["https://data.com/{}".format(i)],
"uris": [f"https://data.com/{i}"],
"meta_hash": bytes32.random(seeded_random).hex(),
"meta_uris": ["https://meatadata.com/{}".format(i)],
"meta_uris": [f"https://meatadata.com/{i}"],
"license_hash": bytes32.random(seeded_random).hex(),
"license_uris": ["https://license.com/{}".format(i)],
"license_uris": [f"https://license.com/{i}"],
"edition_number": i + 1,
"edition_total": n,
}
@ -841,11 +841,11 @@ async def test_nft_mint_from_xch_rpc(
metadata_list = [
{
"hash": bytes32.random(seeded_random).hex(),
"uris": ["https://data.com/{}".format(i)],
"uris": [f"https://data.com/{i}"],
"meta_hash": bytes32.random(seeded_random).hex(),
"meta_uris": ["https://meatadata.com/{}".format(i)],
"meta_uris": [f"https://meatadata.com/{i}"],
"license_hash": bytes32.random(seeded_random).hex(),
"license_uris": ["https://license.com/{}".format(i)],
"license_uris": [f"https://license.com/{i}"],
"edition_number": i + 1,
"edition_total": n,
}

View File

@ -1346,23 +1346,23 @@ async def test_get_coin_records_by_names(wallet_rpc_environment: WalletRpcTestEn
assert await client.get_coin_records_by_names([]) == []
# 2. All coins
rpc_result = await client.get_coin_records_by_names(coin_ids + coin_ids_unspent)
assert set(record.coin for record in rpc_result) == {*coins, *coins_unspent}
assert {record.coin for record in rpc_result} == {*coins, *coins_unspent}
# 3. All spent coins
rpc_result = await client.get_coin_records_by_names(coin_ids, include_spent_coins=True)
assert set(record.coin for record in rpc_result) == coins
assert {record.coin for record in rpc_result} == coins
# 4. All unspent coins
rpc_result = await client.get_coin_records_by_names(coin_ids_unspent, include_spent_coins=False)
assert set(record.coin for record in rpc_result) == coins_unspent
assert {record.coin for record in rpc_result} == coins_unspent
# 5. Filter start/end height
filter_records = result.records[:10]
assert len(filter_records) == 10
filter_coin_ids = [record.name() for record in filter_records]
filter_coins = set(record.coin for record in filter_records)
filter_coins = {record.coin for record in filter_records}
min_height = min(record.confirmed_block_height for record in filter_records)
max_height = max(record.confirmed_block_height for record in filter_records)
assert min_height != max_height
rpc_result = await client.get_coin_records_by_names(filter_coin_ids, start_height=min_height, end_height=max_height)
assert set(record.coin for record in rpc_result) == filter_coins
assert {record.coin for record in rpc_result} == filter_coins
# 8. Test the failure case
with pytest.raises(ValueError, match="not found"):
await client.get_coin_records_by_names(coin_ids, include_spent_coins=False)

View File

@ -73,7 +73,7 @@ class TestPuzzleStore:
assert await db.index_for_pubkey(derivation_recs[0].pubkey) is None
assert await db.index_for_puzzle_hash(derivation_recs[2].puzzle_hash) is None
assert await db.get_wallet_identifier_for_puzzle_hash(derivation_recs[2].puzzle_hash) is None
assert len((await db.get_all_puzzle_hashes())) == 0
assert len(await db.get_all_puzzle_hashes()) == 0
assert await db.get_last_derivation_path() is None
assert await db.get_unused_derivation_path() is None
assert await db.get_derivation_record(0, 2, False) is None
@ -88,7 +88,7 @@ class TestPuzzleStore:
derivation_recs[2].wallet_id,
derivation_recs[2].wallet_type,
)
assert len((await db.get_all_puzzle_hashes())) == 2000
assert len(await db.get_all_puzzle_hashes()) == 2000
assert await db.get_last_derivation_path() == 999
assert await db.get_unused_derivation_path() == 0
assert await db.get_derivation_record(0, 2, False) == derivation_recs[1]

View File

@ -114,8 +114,8 @@ async def test_only_odd_coins_0(bt):
spend_bundle = SpendBundle.aggregate([launcher_spend_bundle, SpendBundle([coin_spend], G2Element())])
coins_added, coins_removed, _ = await check_spend_bundle_validity(bt, blocks, spend_bundle)
coin_set_added = set([_.coin for _ in coins_added])
coin_set_removed = set([_.coin for _ in coins_removed])
coin_set_added = {_.coin for _ in coins_added}
coin_set_removed = {_.coin for _ in coins_removed}
launcher_coin = launcher_spend_bundle.coin_spends[0].coin

View File

@ -358,7 +358,7 @@ def claim_p2_singleton(
SerializedProgram.from_program(p2_singleton_puzzle),
p2_singleton_solution,
)
expected_p2_singleton_announcement = Announcement(p2_singleton_coin_name, bytes(b"$")).name()
expected_p2_singleton_announcement = Announcement(p2_singleton_coin_name, b"$").name()
singleton_conditions = [
Program.to([ConditionOpcode.CREATE_PUZZLE_ANNOUNCEMENT, p2_singleton_coin_name]),
Program.to([ConditionOpcode.CREATE_COIN, inner_puzzle_hash, 1]),

View File

@ -824,21 +824,19 @@ async def test_valid_times_migration() -> None:
async with DBConnection(1) as db_wrapper:
async with db_wrapper.writer_maybe_transaction() as conn:
await conn.execute(
(
"CREATE TABLE IF NOT EXISTS transaction_record("
" transaction_record blob,"
" bundle_id text PRIMARY KEY,"
" confirmed_at_height bigint,"
" created_at_time bigint,"
" to_puzzle_hash text,"
" amount blob,"
" fee_amount blob,"
" confirmed int,"
" sent int,"
" wallet_id bigint,"
" trade_id text,"
" type int)"
)
"CREATE TABLE IF NOT EXISTS transaction_record("
" transaction_record blob,"
" bundle_id text PRIMARY KEY,"
" confirmed_at_height bigint,"
" created_at_time bigint,"
" to_puzzle_hash text,"
" amount blob,"
" fee_amount blob,"
" confirmed int,"
" sent int,"
" wallet_id bigint,"
" trade_id text,"
" type int)"
)
old_record = TransactionRecordOld(
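Note: the same extraneous-parentheses rewrite applies to this multi-line SQL string; conn.execute receives one concatenated CREATE TABLE statement either way. A trimmed, runnable sketch using the stdlib sqlite3 module (the test itself goes through an async DB wrapper):

import sqlite3

conn = sqlite3.connect(":memory:")
# The adjacent fragments still join into a single SQL string; only
# the wrapper parentheses around them were removed.
conn.execute(
    "CREATE TABLE IF NOT EXISTS transaction_record("
    " bundle_id text PRIMARY KEY,"
    " confirmed int)"
)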

View File

@ -241,19 +241,19 @@ async def test_get_unspent_coins_for_wallet() -> None:
await store.add_coin_record(record_7) # wallet 2
await store.add_coin_record(record_8)
assert await store.get_unspent_coins_for_wallet(1) == set([record_5])
assert await store.get_unspent_coins_for_wallet(2) == set([record_7])
assert await store.get_unspent_coins_for_wallet(1) == {record_5}
assert await store.get_unspent_coins_for_wallet(2) == {record_7}
assert await store.get_unspent_coins_for_wallet(3) == set()
await store.set_spent(coin_4.name(), uint32(12))
assert await store.get_unspent_coins_for_wallet(1) == set([record_5])
assert await store.get_unspent_coins_for_wallet(2) == set([record_7])
assert await store.get_unspent_coins_for_wallet(1) == {record_5}
assert await store.get_unspent_coins_for_wallet(2) == {record_7}
assert await store.get_unspent_coins_for_wallet(3) == set()
await store.set_spent(coin_7.name(), uint32(12))
assert await store.get_unspent_coins_for_wallet(1) == set([record_5])
assert await store.get_unspent_coins_for_wallet(1) == {record_5}
assert await store.get_unspent_coins_for_wallet(2) == set()
assert await store.get_unspent_coins_for_wallet(3) == set()
@ -263,7 +263,7 @@ async def test_get_unspent_coins_for_wallet() -> None:
assert await store.get_unspent_coins_for_wallet(2) == set()
assert await store.get_unspent_coins_for_wallet(3) == set()
assert await store.get_unspent_coins_for_wallet(1, coin_type=CoinType.CLAWBACK) == set([record_8])
assert await store.get_unspent_coins_for_wallet(1, coin_type=CoinType.CLAWBACK) == {record_8}
@pytest.mark.asyncio
@ -277,30 +277,30 @@ async def test_get_all_unspent_coins() -> None:
await store.add_coin_record(record_2) # not spent
await store.add_coin_record(record_3) # spent
await store.add_coin_record(record_8) # spent
assert await store.get_all_unspent_coins() == set([record_1, record_2])
assert await store.get_all_unspent_coins() == {record_1, record_2}
await store.add_coin_record(record_4) # spent
await store.add_coin_record(record_5) # not spent
await store.add_coin_record(record_6) # spent
assert await store.get_all_unspent_coins() == set([record_1, record_2, record_5])
assert await store.get_all_unspent_coins() == {record_1, record_2, record_5}
await store.add_coin_record(record_7) # not spent
assert await store.get_all_unspent_coins() == set([record_1, record_2, record_5, record_7])
assert await store.get_all_unspent_coins() == {record_1, record_2, record_5, record_7}
await store.set_spent(coin_4.name(), uint32(12))
assert await store.get_all_unspent_coins() == set([record_1, record_2, record_5, record_7])
assert await store.get_all_unspent_coins() == {record_1, record_2, record_5, record_7}
await store.set_spent(coin_7.name(), uint32(12))
assert await store.get_all_unspent_coins() == set([record_1, record_2, record_5])
assert await store.get_all_unspent_coins() == {record_1, record_2, record_5}
await store.set_spent(coin_5.name(), uint32(12))
assert await store.get_all_unspent_coins() == set([record_1, record_2])
assert await store.get_all_unspent_coins() == {record_1, record_2}
await store.set_spent(coin_2.name(), uint32(12))
await store.set_spent(coin_1.name(), uint32(12))
assert await store.get_all_unspent_coins() == set()
assert await store.get_all_unspent_coins(coin_type=CoinType.CLAWBACK) == set([record_8])
assert await store.get_all_unspent_coins(coin_type=CoinType.CLAWBACK) == {record_8}
@pytest.mark.asyncio
@ -316,8 +316,8 @@ async def test_get_records_by_parent_id() -> None:
await store.add_coin_record(record_6)
await store.add_coin_record(record_7)
assert set(await store.get_coin_records_by_parent_id(coin_1.parent_coin_info)) == set([record_1, record_2])
assert set(await store.get_coin_records_by_parent_id(coin_2.parent_coin_info)) == set([record_1, record_2])
assert set(await store.get_coin_records_by_parent_id(coin_1.parent_coin_info)) == {record_1, record_2}
assert set(await store.get_coin_records_by_parent_id(coin_2.parent_coin_info)) == {record_1, record_2}
assert await store.get_coin_records_by_parent_id(coin_3.parent_coin_info) == [record_3]
assert await store.get_coin_records_by_parent_id(coin_4.parent_coin_info) == [record_4]
assert await store.get_coin_records_by_parent_id(coin_5.parent_coin_info) == [record_5]

View File

@ -122,16 +122,14 @@ async def test_valid_times_migration() -> None:
async with DBConnection(1) as db_wrapper:
async with db_wrapper.writer_maybe_transaction() as conn:
await conn.execute(
(
"CREATE TABLE IF NOT EXISTS trade_records("
" trade_record blob,"
" trade_id text PRIMARY KEY,"
" status int,"
" confirmed_at_index int,"
" created_at_time bigint,"
" sent int,"
" is_my_offer tinyint)"
)
"CREATE TABLE IF NOT EXISTS trade_records("
" trade_record blob,"
" trade_id text PRIMARY KEY,"
" status int,"
" confirmed_at_index int,"
" created_at_time bigint,"
" sent int,"
" is_my_offer tinyint)"
)
fake_offer = Offer({}, SpendBundle([], G2Element()), {})

View File

@ -48,7 +48,7 @@ def fontcolor(pct: float) -> str:
@lru_cache(maxsize=10000)
def resolve_function(file: str, line: int) -> str:
try:
with open(file, "r") as f:
with open(file) as f:
all_lines: List[str] = []
for row in f:
all_lines.append(row)