Merge and disable tests

Mariano Sorgente 2021-06-07 11:01:11 -04:00
commit 7fd16a5278
No known key found for this signature in database
GPG Key ID: E4B42B7E891A0AE3
63 changed files with 281 additions and 132 deletions

View File

@@ -41,7 +41,7 @@ jobs:
         python-version: ${{ matrix.python-version }}
     - name: Cache npm
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       env:
         cache-name: cache-node-modules
       with:
@@ -59,7 +59,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         # Note that new runners may break this https://github.com/actions/cache/issues/292
         path: ${{ steps.pip-cache.outputs.dir }}

View File

@@ -41,7 +41,7 @@ jobs:
         python-version: ${{ matrix.python-version }}
     - name: Cache npm
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       env:
         cache-name: cache-node-modules
       with:
@@ -59,7 +59,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         # Note that new runners may break this https://github.com/actions/cache/issues/292
         path: ${{ steps.pip-cache.outputs.dir }}

View File

@@ -45,7 +45,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         # Note that new runners may break this https://github.com/actions/cache/issues/292
         path: ${{ steps.pip-cache.outputs.dir }}

View File

@@ -45,7 +45,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         # Note that new runners may break this https://github.com/actions/cache/issues/292
         path: ${{ steps.pip-cache.outputs.dir }}

View File

@@ -45,7 +45,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         # Note that new runners may break this https://github.com/actions/cache/issues/292
         path: ${{ steps.pip-cache.outputs.dir }}

View File

@@ -45,7 +45,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         # Note that new runners may break this https://github.com/actions/cache/issues/292
         path: ${{ steps.pip-cache.outputs.dir }}

View File

@@ -45,7 +45,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         # Note that new runners may break this https://github.com/actions/cache/issues/292
         path: ${{ steps.pip-cache.outputs.dir }}

View File

@@ -45,7 +45,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         # Note that new runners may break this https://github.com/actions/cache/issues/292
         path: ${{ steps.pip-cache.outputs.dir }}

View File

@@ -45,7 +45,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         # Note that new runners may break this https://github.com/actions/cache/issues/292
         path: ${{ steps.pip-cache.outputs.dir }}

View File

@@ -45,7 +45,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         # Note that new runners may break this https://github.com/actions/cache/issues/292
         path: ${{ steps.pip-cache.outputs.dir }}

View File

@@ -45,7 +45,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         # Note that new runners may break this https://github.com/actions/cache/issues/292
         path: ${{ steps.pip-cache.outputs.dir }}

View File

@@ -45,7 +45,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         # Note that new runners may break this https://github.com/actions/cache/issues/292
         path: ${{ steps.pip-cache.outputs.dir }}

View File

@@ -45,7 +45,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         # Note that new runners may break this https://github.com/actions/cache/issues/292
         path: ${{ steps.pip-cache.outputs.dir }}

View File

@@ -45,7 +45,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         # Note that new runners may break this https://github.com/actions/cache/issues/292
         path: ${{ steps.pip-cache.outputs.dir }}

View File

@@ -45,7 +45,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         # Note that new runners may break this https://github.com/actions/cache/issues/292
         path: ${{ steps.pip-cache.outputs.dir }}

View File

@@ -45,7 +45,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         # Note that new runners may break this https://github.com/actions/cache/issues/292
         path: ${{ steps.pip-cache.outputs.dir }}

View File

@@ -45,7 +45,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         # Note that new runners may break this https://github.com/actions/cache/issues/292
         path: ${{ steps.pip-cache.outputs.dir }}

View File

@@ -45,7 +45,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         # Note that new runners may break this https://github.com/actions/cache/issues/292
         path: ${{ steps.pip-cache.outputs.dir }}

View File

@@ -45,7 +45,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         # Note that new runners may break this https://github.com/actions/cache/issues/292
         path: ${{ steps.pip-cache.outputs.dir }}

View File

@@ -40,7 +40,7 @@ jobs:
         python-version: ${{ matrix.python-version }}
     - name: Cache npm
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ~/.npm
         key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -53,7 +53,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ${{ steps.pip-cache.outputs.dir }}
         key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}

View File

@@ -40,7 +40,7 @@ jobs:
         python-version: ${{ matrix.python-version }}
     - name: Cache npm
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ~/.npm
         key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -53,7 +53,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ${{ steps.pip-cache.outputs.dir }}
         key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}

View File

@@ -40,7 +40,7 @@ jobs:
         python-version: ${{ matrix.python-version }}
     - name: Cache npm
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ~/.npm
         key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -53,7 +53,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ${{ steps.pip-cache.outputs.dir }}
         key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}

View File

@@ -40,7 +40,7 @@ jobs:
         python-version: ${{ matrix.python-version }}
     - name: Cache npm
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ~/.npm
         key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -53,7 +53,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ${{ steps.pip-cache.outputs.dir }}
         key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}

View File

@@ -40,7 +40,7 @@ jobs:
         python-version: ${{ matrix.python-version }}
     - name: Cache npm
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ~/.npm
         key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -53,7 +53,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ${{ steps.pip-cache.outputs.dir }}
         key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}

View File

@@ -40,7 +40,7 @@ jobs:
         python-version: ${{ matrix.python-version }}
     - name: Cache npm
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ~/.npm
         key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -53,7 +53,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ${{ steps.pip-cache.outputs.dir }}
         key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}

View File

@@ -40,7 +40,7 @@ jobs:
         python-version: ${{ matrix.python-version }}
     - name: Cache npm
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ~/.npm
         key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -53,7 +53,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ${{ steps.pip-cache.outputs.dir }}
         key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}

View File

@@ -40,7 +40,7 @@ jobs:
         python-version: ${{ matrix.python-version }}
     - name: Cache npm
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ~/.npm
         key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -53,7 +53,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ${{ steps.pip-cache.outputs.dir }}
         key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}

View File

@@ -40,7 +40,7 @@ jobs:
         python-version: ${{ matrix.python-version }}
     - name: Cache npm
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ~/.npm
         key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -53,7 +53,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ${{ steps.pip-cache.outputs.dir }}
         key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}

View File

@@ -40,7 +40,7 @@ jobs:
         python-version: ${{ matrix.python-version }}
     - name: Cache npm
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ~/.npm
         key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -53,7 +53,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ${{ steps.pip-cache.outputs.dir }}
         key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}

View File

@@ -40,7 +40,7 @@ jobs:
         python-version: ${{ matrix.python-version }}
     - name: Cache npm
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ~/.npm
         key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -53,7 +53,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ${{ steps.pip-cache.outputs.dir }}
         key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}

View File

@@ -40,7 +40,7 @@ jobs:
         python-version: ${{ matrix.python-version }}
     - name: Cache npm
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ~/.npm
         key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -53,7 +53,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ${{ steps.pip-cache.outputs.dir }}
         key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}

View File

@@ -40,7 +40,7 @@ jobs:
         python-version: ${{ matrix.python-version }}
     - name: Cache npm
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ~/.npm
         key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -53,7 +53,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ${{ steps.pip-cache.outputs.dir }}
         key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}

View File

@@ -40,7 +40,7 @@ jobs:
         python-version: ${{ matrix.python-version }}
     - name: Cache npm
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ~/.npm
         key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -53,7 +53,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ${{ steps.pip-cache.outputs.dir }}
         key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}

View File

@@ -40,7 +40,7 @@ jobs:
         python-version: ${{ matrix.python-version }}
     - name: Cache npm
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ~/.npm
         key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -53,7 +53,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ${{ steps.pip-cache.outputs.dir }}
         key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}

View File

@@ -40,7 +40,7 @@ jobs:
         python-version: ${{ matrix.python-version }}
     - name: Cache npm
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ~/.npm
         key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -53,7 +53,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ${{ steps.pip-cache.outputs.dir }}
         key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}

View File

@@ -40,7 +40,7 @@ jobs:
         python-version: ${{ matrix.python-version }}
     - name: Cache npm
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ~/.npm
         key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -53,7 +53,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ${{ steps.pip-cache.outputs.dir }}
         key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}

View File

@@ -35,7 +35,7 @@ jobs:
         echo "::set-output name=dir::$(npm config get cache)"
     - name: Cache npm
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ${{ steps.npm-cache.outputs.dir }}
         key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -48,7 +48,7 @@ jobs:
         echo "::set-output name=dir::$(pip cache dir)"
     - name: Cache pip
-      uses: actions/cache@v2.1.5
+      uses: actions/cache@v2.1.6
       with:
         path: ${{ steps.pip-cache.outputs.dir }}
         key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}

View File

@@ -48,7 +48,7 @@ jobs:
       # Run Linter against code base #
       ################################
       - name: Lint Code Base
-        uses: github/super-linter@v3.15.5
+        uses: github/super-linter@v4.0.2
         # uses: docker://github/super-linter:v3.10.2
         env:
           VALIDATE_ALL_CODEBASE: true

View File

@@ -9,6 +9,7 @@ the virtual environment.
 ```bash
 . ./activate
+chmod +x ./install-timelord.sh
 sh install-timelord.sh
 ```

View File

@@ -11,8 +11,8 @@
 Chia is a modern cryptocurrency built from scratch, designed to be efficient, decentralized, and secure. Here are some of the features and benefits:
 * [Proof of space and time](https://docs.google.com/document/d/1tmRIb7lgi4QfKkNaxuKOBHRmwbVlGL4f7EsBDr_5xZE/edit) based consensus which allows anyone to farm with commodity hardware
 * Very easy to use full node and farmer GUI and cli (thousands of nodes active on mainnet)
-* Simplified UTXO based transaction model, with small on chain state
-* Lisp-style turing complete functional [programming language](https://chialisp.com/) for money related use cases
+* Simplified UTXO based transaction model, with small on-chain state
+* Lisp-style Turing-complete functional [programming language](https://chialisp.com/) for money related use cases
 * BLS keys and aggregate signatures (only one signature per block)
 * [Pooling protocol](https://www.chia.net/2020/11/10/pools-in-chia.html) (in development) that allows farmers to have control of making blocks
 * Support for light clients with fast, objective syncing

View File

@@ -14,6 +14,8 @@ def configure(
     set_fullnode_port: str,
     set_log_level: str,
     enable_upnp: str,
+    set_outbound_peer_count: str,
+    set_peer_count: str,
     testnet: str,
 ):
     config: Dict = load_config(DEFAULT_ROOT_PATH, "config.yaml")
@@ -72,6 +74,14 @@ def configure(
         else:
             print("uPnP disabled")
         change_made = True
+    if set_outbound_peer_count is not None:
+        config["full_node"]["target_outbound_peer_count"] = int(set_outbound_peer_count)
+        print("Target outbound peer count updated")
+        change_made = True
+    if set_peer_count is not None:
+        config["full_node"]["target_peer_count"] = int(set_peer_count)
+        print("Target peer count updated")
+        change_made = True
     if testnet is not None:
         if testnet == "true" or testnet == "t":
             print("Setting Testnet")
@@ -133,7 +143,10 @@ def configure(
 @click.command("configure", short_help="Modify configuration")
 @click.option(
-    "--testnet", "-t", help="configures for connection to testnet", type=click.Choice(["true", "t", "false", "f"])
+    "--testnet",
+    "-t",
+    help="configures for connection to testnet",
+    type=click.Choice(["true", "t", "false", "f"]),
 )
 @click.option("--set-node-introducer", help="Set the introducer for node - IP:Port", type=str)
 @click.option("--set-farmer-peer", help="Set the farmer peer for harvester - IP:Port", type=str)
@@ -150,10 +163,30 @@ def configure(
     type=click.Choice(["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG", "NOTSET"]),
 )
 @click.option(
-    "--enable-upnp", "--upnp", "-upnp", help="Enable or disable uPnP", type=click.Choice(["true", "t", "false", "f"])
+    "--enable-upnp",
+    "--upnp",
+    "-upnp",
+    help="Enable or disable uPnP",
+    type=click.Choice(["true", "t", "false", "f"]),
 )
+@click.option(
+    "--set_outbound-peer-count",
+    help="Update the target outbound peer count (default 8)",
+    type=str,
+)
+@click.option("--set-peer-count", help="Update the target peer count (default 80)", type=str)
 @click.pass_context
-def configure_cmd(ctx, set_farmer_peer, set_node_introducer, set_fullnode_port, set_log_level, enable_upnp, testnet):
+def configure_cmd(
+    ctx,
+    set_farmer_peer,
+    set_node_introducer,
+    set_fullnode_port,
+    set_log_level,
+    enable_upnp,
+    set_outbound_peer_count,
+    set_peer_count,
+    testnet,
+):
     configure(
         ctx.obj["root_path"],
         set_farmer_peer,
@@ -161,5 +194,7 @@ def configure_cmd(ctx, set_farmer_peer, set_node_introducer, set_fullnode_port,
         set_fullnode_port,
         set_log_level,
         enable_upnp,
+        set_outbound_peer_count,
+        set_peer_count,
         testnet,
     )

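A note on the hunk above: the two new flags only touch two keys under the full_node section of config.yaml. Below is a minimal sketch of the same update logic, with a plain dict standing in for the config that chia's load_config helper returns (an assumption here; the real command also saves the result back to config.yaml):

```python
from typing import Optional

# Sketch of the new option handling, using a plain dict in place of the
# loaded config.yaml contents.
def set_peer_counts(config: dict, outbound: Optional[str] = None, total: Optional[str] = None) -> bool:
    change_made = False
    if outbound is not None:
        config["full_node"]["target_outbound_peer_count"] = int(outbound)
        change_made = True
    if total is not None:
        config["full_node"]["target_peer_count"] = int(total)
        change_made = True
    return change_made

config = {"full_node": {"target_outbound_peer_count": 8, "target_peer_count": 80}}
assert set_peer_counts(config, outbound="12", total="100")
assert config["full_node"]["target_outbound_peer_count"] == 12
assert config["full_node"]["target_peer_count"] == 100
```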
View File

@@ -75,8 +75,13 @@ def get_transactions_cmd(wallet_rpc_port: int, fingerprint: int, id: int, offset
     required=True,
 )
 @click.option("-t", "--address", help="Address to send the XCH", type=str, required=True)
-def send_cmd(wallet_rpc_port: int, fingerprint: int, id: int, amount: str, fee: str, address: str) -> None:
-    extra_params = {"id": id, "amount": amount, "fee": fee, "address": address}
+@click.option(
+    "-o", "--override", help="Submits transaction without checking for unusual values", is_flag=True, default=False
+)
+def send_cmd(
+    wallet_rpc_port: int, fingerprint: int, id: int, amount: str, fee: str, address: str, override: bool
+) -> None:
+    extra_params = {"id": id, "amount": amount, "fee": fee, "address": address, "override": override}
     import asyncio
     from .wallet_funcs import execute_with_wallet, send

View File

@@ -68,12 +68,23 @@ async def get_transactions(args: dict, wallet_client: WalletRpcClient, fingerprint
             break

+
+def check_unusual_transaction(amount: Decimal, fee: Decimal):
+    return fee >= amount
+
+
 async def send(args: dict, wallet_client: WalletRpcClient, fingerprint: int) -> None:
     wallet_id = args["id"]
     amount = Decimal(args["amount"])
     fee = Decimal(args["fee"])
     address = args["address"]
+    override = args["override"]
+    if not override and check_unusual_transaction(amount, fee):
+        print(
+            f"A transaction of amount {amount} and fee {fee} is unusual.\n"
+            f"Pass in --override if you are sure you mean to do this."
+        )
+        return
     print("Submitting transaction...")
     final_amount = uint64(int(amount * units["chia"]))
     final_fee = uint64(int(fee * units["chia"]))

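The guard added above treats any spend whose fee is at least the amount being sent as unusual, and refuses to submit it unless --override is passed. A self-contained sketch of the predicate and its effect:

```python
from decimal import Decimal

def check_unusual_transaction(amount: Decimal, fee: Decimal):
    # Mirrors the helper added above: a fee that equals or exceeds the
    # amount being sent is treated as suspicious.
    return fee >= amount

# Sending 0.1 XCH with a 0.5 XCH fee is flagged; a tiny fee is not.
assert check_unusual_transaction(Decimal("0.1"), Decimal("0.5"))
assert not check_unusual_transaction(Decimal("1"), Decimal("0.00001"))
```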
View File

@@ -256,9 +256,9 @@ class Blockchain(BlockchainInterface):
         # Always add the block to the database
         async with self.block_store.db_wrapper.lock:
             try:
-                header_hash: bytes32 = block.header_hash
                 # Perform the DB operations to update the state, and rollback if something goes wrong
                 await self.block_store.db_wrapper.begin_transaction()
+                header_hash: bytes32 = block.header_hash
                 await self.block_store.add_full_block(header_hash, block, block_record)
                 fork_height, peak_height, records = await self._reconsider_peak(
                     block_record, genesis, fork_point_with_peak, npc_result

View File

@@ -760,6 +760,13 @@ def launch_plotter(root_path: Path, service_name: str, service_array: List[str],
         startupinfo = subprocess.STARTUPINFO()  # type: ignore
         startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW  # type: ignore
+
+    # Windows-specific.
+    # If the current process group is used, CTRL_C_EVENT will kill the parent and everyone in the group!
+    try:
+        creationflags: int = subprocess.CREATE_NEW_PROCESS_GROUP  # type: ignore
+    except AttributeError:  # Not on Windows.
+        creationflags = 0
     plotter_path = plotter_log_path(root_path, id)
     if plotter_path.parent.exists():
@@ -769,7 +776,14 @@ def launch_plotter(root_path: Path, service_name: str, service_array: List[str],
         mkdir(plotter_path.parent)
     outfile = open(plotter_path.resolve(), "w")
     log.info(f"Service array: {service_array}")
-    process = subprocess.Popen(service_array, shell=False, stderr=outfile, stdout=outfile, startupinfo=startupinfo)
+    process = subprocess.Popen(
+        service_array,
+        shell=False,
+        stderr=outfile,
+        stdout=outfile,
+        startupinfo=startupinfo,
+        creationflags=creationflags,
+    )
     pid_path = pid_path_for_service(root_path, service_name, id)
     try:

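The change above starts the plotter child in its own process group on Windows, so that a CTRL_C_EVENT aimed at the child cannot take down the daemon and everything else in its group; on other platforms the flag simply does not exist and zero is passed. A standalone sketch of the same probe-and-fallback pattern (the child command here is only a placeholder):

```python
import subprocess
import sys

# Probe for the Windows-only flag; POSIX builds of the stdlib do not define it.
try:
    creationflags: int = subprocess.CREATE_NEW_PROCESS_GROUP  # type: ignore[attr-defined]
except AttributeError:  # not on Windows
    creationflags = 0

# creationflags=0 is accepted on every platform, so the same call works everywhere.
proc = subprocess.Popen(
    [sys.executable, "-c", "print('child running')"],
    creationflags=creationflags,
)
proc.wait()
```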
View File

@@ -127,7 +127,12 @@ class BlockStore:
         return None

     def rollback_cache_block(self, header_hash: bytes32):
-        self.block_cache.remove(header_hash)
+        try:
+            self.block_cache.remove(header_hash)
+        except KeyError:
+            # this is best effort. When rolling back, we may not have added the
+            # block to the cache yet
+            pass

     async def get_full_block(self, header_hash: bytes32) -> Optional[FullBlock]:
         cached = self.block_cache.get(header_hash)

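The rollback path above becomes best effort: evicting a block from the cache must not fail just because the block never made it into the cache. The same pattern, with a plain dict standing in for the block cache (an assumption here; the real cache's remove() is taken to raise KeyError for unknown keys, as the except clause implies):

```python
cache = {"a": 1}

def rollback(cache: dict, key: str) -> None:
    try:
        del cache[key]
    except KeyError:
        # Best effort: during rollback the entry may never have been cached.
        pass

rollback(cache, "missing")  # no error
rollback(cache, "a")
assert cache == {}
```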
View File

@@ -68,6 +68,7 @@ class FullNode:
     mempool_manager: MempoolManager
     connection: aiosqlite.Connection
     _sync_task: Optional[asyncio.Task]
+    _init_weight_proof: Optional[asyncio.Task] = None
     blockchain: Blockchain
     config: Dict
     server: Any
@@ -128,10 +129,10 @@ class FullNode:
         self.blockchain = await Blockchain.create(self.coin_store, self.block_store, self.constants)
         self.mempool_manager = MempoolManager(self.coin_store, self.constants)
         self.weight_proof_handler = None
-        asyncio.create_task(self.initialize_weight_proof())
+        self._init_weight_proof = asyncio.create_task(self.initialize_weight_proof())

         if self.config.get("enable_profiler", False):
-            asyncio.create_task(profile_task(self.root_path, self.log))
+            asyncio.create_task(profile_task(self.root_path, "node", self.log))

         self._sync_task = None
         self._segment_task = None
@@ -191,6 +192,7 @@ class FullNode:
                 self.config["introducer_peer"],
                 dns_servers,
                 self.config["peer_connect_interval"],
+                self.config["selected_network"],
                 self.log,
             )
         except Exception as e:
@@ -531,6 +533,8 @@ class FullNode:
     def _close(self):
         self._shut_down = True
+        if self._init_weight_proof is not None:
+            self._init_weight_proof.cancel()
         if self.blockchain is not None:
             self.blockchain.shut_down()
         if self.mempool_manager is not None:
@@ -545,6 +549,8 @@ class FullNode:
         for task_id, task in list(self.full_node_store.tx_fetch_tasks.items()):
             cancel_task_safe(task, self.log)
         await self.connection.close()
+        if self._init_weight_proof is not None:
+            await asyncio.wait([self._init_weight_proof])

     async def _sync(self):
         """

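The weight-proof initialization task is now kept in _init_weight_proof so that _close() can cancel it and the close path can wait for it, instead of leaving an anonymous create_task() result running during shutdown. A minimal sketch of that task-lifecycle pattern (the names here are illustrative, not chia's):

```python
import asyncio
from typing import Optional

class Node:
    def __init__(self) -> None:
        self._init_task: Optional[asyncio.Task] = None

    async def start(self) -> None:
        # Keep a handle instead of a bare asyncio.create_task(...) call.
        self._init_task = asyncio.create_task(self._initialize())

    async def _initialize(self) -> None:
        await asyncio.sleep(3600)  # stands in for expensive startup work

    async def close(self) -> None:
        if self._init_task is not None:
            self._init_task.cancel()
            # asyncio.wait() absorbs the CancelledError and returns once done.
            await asyncio.wait([self._init_task])

async def main() -> None:
    node = Node()
    await node.start()
    await node.close()  # returns promptly; no task left dangling

asyncio.run(main())
```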
View File

@@ -46,7 +46,7 @@ def _get_filenames(directory: Path) -> List[Path]:
                 if child.suffix == ".plot" and not child.name.startswith("._"):
                     all_files.append(child)
             else:
-                log.info(f"Not checking subdirectory {child}, subdirectories not added by default")
+                log.debug(f"Not checking subdirectory {child}, subdirectories not added by default")
     except Exception as e:
         log.warning(f"Error reading directory {directory} {e}")
     return all_files

View File

@@ -95,7 +95,7 @@ class FullNodeRpcClient(RpcClient):
             d["end_height"] = end_height
         return [
             CoinRecord.from_json_dict(coin)
-            for coin in ((await self.fetch("get_coin_records_by_puzzle_hash", d))["coin_records"])
+            for coin in (await self.fetch("get_coin_records_by_puzzle_hash", d))["coin_records"]
         ]

     async def get_coin_records_by_puzzle_hashes(
@@ -113,7 +113,7 @@ class FullNodeRpcClient(RpcClient):
             d["end_height"] = end_height
         return [
             CoinRecord.from_json_dict(coin)
-            for coin in ((await self.fetch("get_coin_records_by_puzzle_hashes", d))["coin_records"])
+            for coin in (await self.fetch("get_coin_records_by_puzzle_hashes", d))["coin_records"]
         ]

     async def get_additions_and_removals(self, header_hash: bytes32) -> Tuple[List[CoinRecord], List[CoinRecord]]:

View File

@@ -44,11 +44,11 @@ class RpcServer:
             await self.websocket.close()

     async def _state_changed(self, *args):
-        change = args[0]
         if self.websocket is None:
             return None
         payloads: List[Dict] = await self.rpc_api._state_changed(*args)
+        change = args[0]
         if change == "add_connection" or change == "close_connection":
             data = await self.get_connections({})
             if data is not None:
@@ -222,13 +222,10 @@
             except Exception as e:
                 tb = traceback.format_exc()
                 self.log.warning(f"Error while handling message: {tb}")
-                if len(e.args) > 0:
-                    error = {"success": False, "error": f"{e.args[0]}"}
-                else:
-                    error = {"success": False, "error": f"{e}"}
-                if message is None:
-                    return None
-                await websocket.send_str(format_response(message, error))
+                if message is not None:
+                    error = e.args[0] if e.args else e
+                    res = {"success": False, "error": f"{error}"}
+                    await websocket.send_str(format_response(message, res))

     async def connection(self, ws):
         data = {"service": self.service_name}

View File

@@ -358,6 +358,7 @@ class WalletRpcApi:
                         "colour": colour,
                         "wallet_id": cc_wallet.id(),
                     }
+
             elif request["mode"] == "existing":
                 async with self.service.wallet_state_manager.lock:
                     cc_wallet = await CCWallet.create_wallet_for_cc(
@@ -365,6 +366,10 @@
                     )
                 asyncio.create_task(self._create_backup_and_upload(host))
                 return {"type": cc_wallet.type()}
+
+            else:  # undefined mode
+                pass
+
         elif request["wallet_type"] == "rl_wallet":
             if request["rl_type"] == "admin":
                 log.info("Create rl admin wallet")
@@ -386,6 +391,7 @@
                     "origin": rl_admin.rl_info.rl_origin,
                     "pubkey": rl_admin.rl_info.admin_pubkey.hex(),
                 }
+
             elif request["rl_type"] == "user":
                 log.info("Create rl user wallet")
                 async with self.service.wallet_state_manager.lock:
@@ -397,6 +403,10 @@
                     "type": rl_user.type(),
                     "pubkey": rl_user.rl_info.user_pubkey.hex(),
                 }
+
+            else:  # undefined rl_type
+                pass
+
         elif request["wallet_type"] == "did_wallet":
             if request["did_type"] == "new":
                 backup_dids = []
@@ -420,6 +430,7 @@
                     "my_did": my_did,
                     "wallet_id": did_wallet.id(),
                 }
+
             elif request["did_type"] == "recovery":
                 async with self.service.wallet_state_manager.lock:
                     did_wallet = await DIDWallet.create_new_did_wallet_from_recovery(
@@ -482,6 +493,14 @@
             elif request["mode"] == "recovery":
                 raise ValueError("Need upgraded singleton for on-chain recovery")

+            else:  # undefined did_type
+                pass
+
+        else:  # undefined wallet_type
+            pass
+
+        return None

     ##########################################################################################
     # Wallet
     ##########################################################################################
@@ -820,22 +839,22 @@
             else:
                 new_amount_verifications_required = len(recovery_list)
             async with self.service.wallet_state_manager.lock:
-                success = await wallet.update_recovery_list(recovery_list, new_amount_verifications_required)
+                update_success = await wallet.update_recovery_list(recovery_list, new_amount_verifications_required)
                 # Update coin with new ID info
                 updated_puz = await wallet.get_new_puzzle()
                 spend_bundle = await wallet.create_spend(updated_puz.get_tree_hash())
-            if spend_bundle is not None and success:
-                return {"success": True}
-            return {"success": False}
+            success = spend_bundle is not None and update_success
+            return {"success": success}

     async def did_spend(self, request):
         wallet_id = int(request["wallet_id"])
         async with self.service.wallet_state_manager.lock:
             wallet: DIDWallet = self.service.wallet_state_manager.wallets[wallet_id]
             spend_bundle = await wallet.create_spend(request["puzzlehash"])
-        if spend_bundle is not None:
-            return {"success": True}
-        return {"success": False}
+        success = spend_bundle is not None
+        return {"success": success}

     async def did_get_did(self, request):
         wallet_id = int(request["wallet_id"])

View File

@@ -37,12 +37,19 @@ class FullNodeDiscovery:
         introducer_info: Optional[Dict],
         dns_servers: List[str],
         peer_connect_interval: int,
+        selected_network: str,
         log,
     ):
         self.server: ChiaServer = server
         self.message_queue: asyncio.Queue = asyncio.Queue()
         self.is_closed = False
         self.target_outbound_count = target_outbound_count
+        # This is a double check to make sure testnet and mainnet peer databases never mix up.
+        # If the network is not 'mainnet', it names the peer db differently, including the selected_network.
+        if selected_network != "mainnet":
+            if not peer_db_path.endswith(".sqlite"):
+                raise ValueError(f"Invalid path for peer table db: {peer_db_path}. Make the path end with .sqlite")
+            peer_db_path = peer_db_path[:-7] + "_" + selected_network + ".sqlite"
         self.peer_db_path = path_from_root(root_path, peer_db_path)
         self.dns_servers = dns_servers
         if introducer_info is not None:
@@ -63,12 +70,9 @@ class FullNodeDiscovery:
         self.serialize_task: Optional[asyncio.Task] = None
         self.cleanup_task: Optional[asyncio.Task] = None
         self.initial_wait: int = 0
-        try:
-            self.resolver: Optional[dns.asyncresolver.Resolver] = dns.asyncresolver.Resolver()
-        except Exception as e:
-            self.log.error(e)
-            self.resolver = None
-        self.pending_outbound_connections: Set = set()
+        self.resolver = dns.asyncresolver.Resolver()
+        self.pending_outbound_connections: Set[str] = set()
+        self.pending_tasks: Set[asyncio.Task] = set()

     async def initialize_address_manager(self) -> None:
         mkdir(self.peer_db_path.parent)
@@ -92,6 +96,10 @@ class FullNodeDiscovery:
         self.cancel_task_safe(self.connect_peers_task)
         self.cancel_task_safe(self.serialize_task)
         self.cancel_task_safe(self.cleanup_task)
+        for t in self.pending_tasks:
+            self.cancel_task_safe(t)
+        if len(self.pending_tasks) > 0:
+            await asyncio.wait(self.pending_tasks)
         await self.connection.close()

     def cancel_task_safe(self, task: Optional[asyncio.Task]):
@@ -198,8 +206,6 @@ class FullNodeDiscovery:
         try:
             if self.address_manager is None:
                 return
-            if addr.host in self.pending_outbound_connections:
-                return
             self.pending_outbound_connections.add(addr.host)
             client_connected = await self.server.start_client(
                 addr,
@@ -371,15 +377,19 @@ class FullNodeDiscovery:
                     if not initiate_connection:
                         connect_peer_interval += 15
                     connect_peer_interval = min(connect_peer_interval, self.peer_connect_interval)
-                    if addr is not None and initiate_connection:
-                        while len(self.pending_outbound_connections) >= MAX_CONCURRENT_OUTBOUND_CONNECTIONS:
-                            self.log.debug(
-                                f"Max concurrent outbound connections reached. Retrying in {connect_peer_interval}s."
-                            )
-                            await asyncio.sleep(connect_peer_interval)
-                        self.log.debug(f"Creating connection task with {addr}.")
-                        asyncio.create_task(self.start_client_async(addr, disconnect_after_handshake))
+                    if addr is not None and initiate_connection and addr.host not in self.pending_outbound_connections:
+                        if len(self.pending_outbound_connections) >= MAX_CONCURRENT_OUTBOUND_CONNECTIONS:
+                            self.log.debug("Max concurrent outbound connections reached. waiting")
+                            await asyncio.wait(self.pending_tasks, return_when=asyncio.FIRST_COMPLETED)
+                        self.pending_tasks.add(
+                            asyncio.create_task(self.start_client_async(addr, disconnect_after_handshake))
+                        )
                     await asyncio.sleep(connect_peer_interval)
+
+                    # prune completed connect tasks
+                    self.pending_tasks = set(filter(lambda t: not t.done(), self.pending_tasks))
+
                 except Exception as e:
                     self.log.error(f"Exception in create outbound connections: {e}")
                     self.log.error(f"Traceback: {traceback.format_exc()}")
@@ -467,6 +477,7 @@ class FullNodePeers(FullNodeDiscovery):
         introducer_info,
         dns_servers,
         peer_connect_interval,
+        selected_network,
         log,
     ):
         super().__init__(
@@ -477,6 +488,7 @@ class FullNodePeers(FullNodeDiscovery):
             introducer_info,
             dns_servers,
             peer_connect_interval,
+            selected_network,
             log,
         )
         self.relay_queue = asyncio.Queue()
@@ -491,8 +503,8 @@ class FullNodePeers(FullNodeDiscovery):
     async def close(self):
         await self._close_common()
-        self.self_advertise_task.cancel()
-        self.address_relay_task.cancel()
+        self.cancel_task_safe(self.self_advertise_task)
+        self.cancel_task_safe(self.address_relay_task)

     async def _periodically_self_advertise_and_clean_data(self):
         while not self.is_closed:
@@ -636,6 +648,7 @@ class WalletPeers(FullNodeDiscovery):
         introducer_info,
         dns_servers,
         peer_connect_interval,
+        selected_network,
         log,
     ) -> None:
         super().__init__(
@@ -646,6 +659,7 @@ class WalletPeers(FullNodeDiscovery):
             introducer_info,
             dns_servers,
             peer_connect_interval,
+            selected_network,
            log,
        )

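Two things happen in this file: peer databases get a per-network filename suffix so testnet and mainnet peers can never mix, and outbound connection attempts become a bounded set of tasks that shutdown can cancel and await. A standalone sketch of the bounded-connections idea (the hosts and the connect body are placeholders):

```python
import asyncio
from typing import Set

MAX_CONCURRENT_OUTBOUND_CONNECTIONS = 3  # the real constant lives in node_discovery.py

async def connect(host: str) -> None:
    await asyncio.sleep(0.01)  # stands in for server.start_client()

async def connect_many(hosts) -> None:
    pending: Set[asyncio.Task] = set()
    for host in hosts:
        if len(pending) >= MAX_CONCURRENT_OUTBOUND_CONNECTIONS:
            # Block until at least one in-flight attempt completes,
            # instead of sleeping and retrying in a busy loop.
            await asyncio.wait(pending, return_when=asyncio.FIRST_COMPLETED)
        pending.add(asyncio.create_task(connect(host)))
        # prune completed connect tasks each round
        pending = {t for t in pending if not t.done()}
    if pending:
        await asyncio.wait(pending)

asyncio.run(connect_many([f"10.0.0.{i}" for i in range(10)]))
```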
View File

@@ -1,4 +1,5 @@
 import asyncio
+import os
 import logging
 import logging.config
 import signal
@@ -25,6 +26,11 @@ from chia.util.ints import uint16
 from .reconnect_task import start_reconnect_task

+# this is used to detect whether we are running in the main process or not, in
+# signal handlers. We need to ignore signals in the sub processes.
+main_pid: Optional[int] = None
+

 class Service:
     def __init__(
         self,
@@ -161,6 +167,9 @@ class Service:
         await self.wait_closed()

     def _enable_signals(self) -> None:
+        global main_pid
+        main_pid = os.getpid()
         signal.signal(signal.SIGINT, self._accept_signal)
         signal.signal(signal.SIGTERM, self._accept_signal)
         if platform == "win32" or platform == "cygwin":
@@ -169,11 +178,25 @@

     def _accept_signal(self, signal_number: int, stack_frame):
         self._log.info(f"got signal {signal_number}")
+
+        # we only handle signals in the main process. In the ProcessPoolExecutor
+        # processes, we have to ignore them. We'll shut them down gracefully
+        # from the main process
+        global main_pid
+        if os.getpid() != main_pid:
+            return
         self.stop()

     def stop(self) -> None:
         if not self._is_stopping.is_set():
             self._is_stopping.set()
+
+            # start with UPnP, since this can take a while, we want it to happen
+            # in the background while shutting down everything else
+            for port in self._upnp_ports:
+                if self.upnp is not None:
+                    self.upnp.release(port)
+
             self._log.info("Cancelling reconnect task")
             for _ in self._reconnect_tasks:
                 _.cancel()
@@ -193,10 +216,6 @@
             self._rpc_close_task = asyncio.create_task(close_rpc_server())

-        for port in self._upnp_ports:
-            if self.upnp is not None:
-                self.upnp.release(port)
-
     async def wait_closed(self) -> None:
         await self._is_stopping.wait()
@@ -212,6 +231,11 @@
             self._log.info("Waiting for service _await_closed callback")
             await self._node._await_closed()

+        if self.upnp is not None:
+            # this is a blocking call, waiting for the UPnP thread to exit
+            self.upnp.shutdown()
+
         self._log.info(f"Service {self._service_name} at port {self._advertised_port} fully closed")

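The signal-handling change above records the pid of the main process when handlers are installed; forked workers (for example ProcessPoolExecutor children on POSIX, which inherit the handler) now return immediately instead of racing the main process through shutdown. A minimal sketch of that guard (the stop callback is a placeholder):

```python
import os
import signal
from typing import Callable, Optional

main_pid: Optional[int] = None

def install_handlers(stop: Callable[[], None]) -> None:
    global main_pid
    main_pid = os.getpid()  # remember who the main process is

    def _accept_signal(signum, frame):
        # Forked children inherit this handler; only the main process
        # is allowed to drive the shutdown.
        if os.getpid() != main_pid:
            return
        stop()

    signal.signal(signal.SIGINT, _accept_signal)
    signal.signal(signal.SIGTERM, _accept_signal)

install_handlers(lambda: print("stopping"))
```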
View File

@@ -1,6 +1,7 @@
 import logging
 import threading
 from queue import Queue
+from typing import Optional

 try:
     import miniupnpc
@@ -12,9 +13,10 @@ log = logging.getLogger(__name__)

 class UPnP:
-    def __init__(self):
-        self.queue = Queue()
+    thread: Optional[threading.Thread] = None
+    queue: Queue = Queue()

+    def __init__(self):
         def run():
             try:
                 self.upnp = miniupnpc.UPnP()
@@ -35,8 +37,9 @@
                     )
                 elif msg[0] == "release":
                     port = msg[1]
+                    log.info(f"UPnP, releasing port {port}")
                     self.upnp.deleteportmapping(port, "TCP")
-                    log.info(f"Port {port} closed with UPnP")
+                    log.info(f"UPnP, Port {port} closed")
                 elif msg[0] == "shutdown":
                     keep_going = False
             except Exception as e:
@@ -55,8 +58,14 @@
         self.queue.put(("release", port))

     def shutdown(self):
+        if not self.thread:
+            return
         self.queue.put(("shutdown",))
         log.info("UPnP, shutting down thread")
         self.thread.join()
+        self.thread = None
+
+    # this is here just in case the UPnP object is destroyed non-gracefully,
+    # e.g. via an exception before the main thread can call shutdown()
+    def __del__(self):
+        self.shutdown()

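All miniupnpc calls stay on a single worker thread fed by a queue, and shutdown() becomes idempotent: it sends a sentinel message, joins the thread, and clears the handle so the new __del__ can call it again safely. The same skeleton without the UPnP specifics:

```python
import threading
from queue import Queue

class Worker:
    def __init__(self) -> None:
        self.queue: Queue = Queue()
        self.thread = threading.Thread(target=self._run, daemon=True)
        self.thread.start()

    def _run(self) -> None:
        while True:
            msg = self.queue.get()
            if msg[0] == "shutdown":
                break  # sentinel ends the worker loop

    def shutdown(self) -> None:
        if not self.thread:
            return  # already shut down; safe to call again
        self.queue.put(("shutdown",))
        self.thread.join()  # blocking, like the comment in the diff says
        self.thread = None

    def __del__(self) -> None:
        # Safety net for non-graceful destruction, mirroring the diff.
        self.shutdown()

w = Worker()
w.shutdown()
w.shutdown()  # second call is a no-op
```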
View File

@@ -360,6 +360,8 @@ wallet:
   port: 8449
   rpc_port: 9256
   enable_profiler: False
+  # The minimum height that we care about for our transactions. Set to zero
+  # If we are restoring from private key and don't know the height.
   starting_height: 0

View File

@@ -3,16 +3,15 @@ def format_bytes(bytes: int) -> str:
     if not isinstance(bytes, int) or bytes < 0:
         return "Invalid"

-    labels = {0: "MiB", 1: "GiB", 2: "TiB", 3: "PiB", 4: "EiB", 5: "ZiB", 6: "YiB"}
-    base = 1024
-    value = bytes / base ** 2
-    key = 0
-    while value >= base and key < 6:
-        value /= base
-        key += 1
-    return f"{value:.3f} {labels[key]}"
+    LABELS = ("MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB")
+    BASE = 1024
+    value = bytes / BASE
+    for label in LABELS:
+        value /= BASE
+        if value < BASE:
+            return f"{value:.3f} {label}"
+    return f"{value:.3f} {LABELS[-1]}"


 def format_minutes(minutes: int) -> str:

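The rewritten loop divides once per unit label and returns at the first value below 1024, so everything from MiB through YiB falls out of one pass and oversized values simply accumulate in the final return. The function is small enough to run standalone against the updated tests:

```python
def format_bytes(bytes: int) -> str:
    if not isinstance(bytes, int) or bytes < 0:
        return "Invalid"
    LABELS = ("MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB")
    BASE = 1024
    value = bytes / BASE
    for label in LABELS:
        value /= BASE  # one division per unit step
        if value < BASE:
            return f"{value:.3f} {label}"
    return f"{value:.3f} {LABELS[-1]}"  # anything past YiB stays in YiB

assert format_bytes(1024 ** 2) == "1.000 MiB"
assert format_bytes(1024 ** 2 - 1000) == "0.999 MiB"
assert format_bytes(1024 ** 9) == "1024.000 YiB"
assert format_bytes(1024 ** 10) == "1048576.000 YiB"
```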
View File

@@ -19,9 +19,9 @@ from chia.util.path import mkdir, path_from_root

 # python chia/utils/profiler.py ~/.chia/mainnet/profile 10 20

-async def profile_task(root_path: pathlib.Path, log: logging.Logger) -> None:
-    profile_dir = path_from_root(root_path, "profile")
+async def profile_task(root_path: pathlib.Path, service: str, log: logging.Logger) -> None:
+    profile_dir = path_from_root(root_path, f"profile-{service}")
     log.info("Starting profiler. saving to %s" % profile_dir)
     mkdir(profile_dir)

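With the new service argument, each service profiles into its own directory under the root (profile-node, profile-wallet, ...), so a node and a wallet running concurrently no longer write into one shared "profile" directory. A sketch of the naming, approximating path_from_root with plain path joining:

```python
import pathlib

def profile_dir_for(root: pathlib.Path, service: str) -> pathlib.Path:
    # Mirrors path_from_root(root_path, f"profile-{service}") from the diff.
    return root / f"profile-{service}"

assert profile_dir_for(pathlib.Path("/tmp/chia"), "node").name == "profile-node"
assert profile_dir_for(pathlib.Path("/tmp/chia"), "wallet").name == "profile-wallet"
```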
View File

@@ -1 +1 @@
-ff02ffff01ff02ffff03ff81bfffff01ff04ffff04ff14ffff04ff0bff808080ffff04ffff04ff1cffff04ff82017fffff04ff8217dfff80808080ffff04ffff04ff10ffff04ff17ffff04ffff02ff1effff04ff02ffff04ffff04ff82017fffff04ff8217dfffff04ff820bffff80808080ffff01ff8080808080ff80808080ff80808080ffff01ff02ff16ffff04ff02ffff04ff05ffff04ff820bdfffff04ff8217dfffff04ff8202ffffff04ffff0bffff19ff1aff8205ff80ff2fff8202ff80ff808080808080808080ff0180ffff04ffff01ffffff323dff5233ffff3ea0ccd5bb71183532bff220ba46c268991a00000000000000000000000000000000ffff04ffff04ff1cffff04ff0bffff04ff17ff80808080ffff04ffff04ff1cffff04ff05ffff04ff2fff80808080ffff04ffff04ff12ffff04ff5fff808080ffff04ffff04ff18ffff04ffff0bff5fffff01818080ff808080ff8080808080ff02ffff03ffff07ff0580ffff01ff0bffff0102ffff02ff1effff04ff02ffff04ff09ff80808080ffff02ff1effff04ff02ffff04ff0dff8080808080ffff01ff0bffff0101ff058080ff0180ff018080
+ff02ffff01ff02ffff03ff82017fffff01ff04ffff04ff1cffff04ff5fff808080ffff04ffff04ff0affff04ff8202ffffff04ff820bbfff80808080ffff04ffff04ff08ffff04ff17ffff04ffff02ff1effff04ff02ffff04ffff04ff8202ffffff04ff820bbfffff04ff8217ffff80808080ffff01ff8080808080ff80808080ff80808080ffff01ff04ffff04ff0affff04ff8205bfffff04ff820bbfff80808080ffff04ffff04ff0affff04ff05ffff04ff8205ffff80808080ffff04ffff04ff16ffff04ffff0bffff19ff2fff820bff80ff0bff8205ff80ff808080ffff04ffff04ff14ffff04ffff0bffff0bffff19ff2fff820bff80ff0bff8205ff80ffff01818080ff808080ff808080808080ff0180ffff04ffff01ffff32ff3d52ff33ff3eff02ffff03ffff07ff0580ffff01ff0bffff0102ffff02ff1effff04ff02ffff04ff09ff80808080ffff02ff1effff04ff02ffff04ff0dff8080808080ffff01ff0bffff0101ff058080ff0180ff018080

View File

@@ -51,6 +51,7 @@ from chia.wallet.util.wallet_types import WalletType
 from chia.wallet.wallet_action import WalletAction
 from chia.wallet.wallet_blockchain import ReceiveBlockResult
 from chia.wallet.wallet_state_manager import WalletStateManager
+from chia.util.profiler import profile_task


 class WalletNode:
@@ -142,6 +143,9 @@ class WalletNode:
             self.logged_in = False
             return False

+        if self.config.get("enable_profiler", False):
+            asyncio.create_task(profile_task(self.root_path, "wallet", self.log))
+
         db_path_key_suffix = str(private_key.get_g1().get_fingerprint())
         db_path_replaced: str = (
             self.config["database_path"]
@@ -316,6 +320,7 @@ class WalletNode:
                 self.config["introducer_peer"],
                 [],
                 self.config["peer_connect_interval"],
+                self.config["selected_network"],
                 self.log,
             )
             asyncio.create_task(self.wallet_peers.start())
@@ -366,6 +371,8 @@ class WalletNode:
         if self.wallet_state_manager is None:
             return None
         header_block_records: List[HeaderBlockRecord] = []
+        assert self.server
+        trusted = self.server.is_trusted_peer(peer, self.config["trusted_peers"])
         async with self.wallet_state_manager.blockchain.lock:
             for block in header_blocks:
                 if block.is_transaction_block:
@@ -395,7 +402,7 @@ class WalletNode:
                     header_block_records.append(hbr)
                     additional_coin_spends = []
                 (result, error, fork_h,) = await self.wallet_state_manager.blockchain.receive_block(
-                    hbr, additional_coin_spends=additional_coin_spends
+                    hbr, trusted=trusted, additional_coin_spends=additional_coin_spends
                 )
                 if result == ReceiveBlockResult.NEW_PEAK:
                     if not self.wallet_state_manager.sync_mode:

View File

@@ -124,7 +124,6 @@ class WalletNodeAPI:
     @api_request
     async def reject_puzzle_solution(self, request: wallet_protocol.RejectPuzzleSolution):
         self.log.warning(f"Reject puzzle solution: {request}")
-        pass

     @api_request
     async def respond_header_blocks(self, request: wallet_protocol.RespondHeaderBlocks):
@@ -133,4 +132,3 @@ class WalletNodeAPI:
     @api_request
     async def reject_header_blocks(self, request: wallet_protocol.RejectHeaderBlocks):
         self.log.warning(f"Reject header blocks: {request}")
-        pass

View File

@@ -84,7 +84,7 @@ find_python() {
     unset BEST_VERSION
     for V in 37 3.7 38 3.8 39 3.9 3; do
         if which python$V >/dev/null; then
-            if [ x"$BEST_VERSION" = x ]; then
+            if [ "$BEST_VERSION" = "" ]; then
                 BEST_VERSION=$V
             fi
         fi
@@ -93,7 +93,7 @@ find_python() {
     set -e
 }

-if [ x"$INSTALL_PYTHON_VERSION" = x ]; then
+if [ "$INSTALL_PYTHON_VERSION" = "" ]; then
     INSTALL_PYTHON_VERSION=$(find_python)
 fi

View File

@@ -2,9 +2,9 @@ from setuptools import setup

 dependencies = [
     "blspy==1.0.2",  # Signature library
-    "chiavdf==1.0.1",  # timelord and vdf verification
+    "chiavdf==1.0.2",  # timelord and vdf verification
     "chiabip158==1.0",  # bip158-style wallet filters
-    "chiapos==1.0.2",  # proof of space
+    "chiapos==1.0.3",  # proof of space
     "clvm==0.9.6",
     "clvm_rs==0.1.7",
     "clvm_tools==0.4.3",

View File

@@ -12,6 +12,7 @@ class TestMisc:
         assert format_bytes(-1024) == "Invalid"
         assert format_bytes(0) == "0.000 MiB"
         assert format_bytes(1024) == "0.001 MiB"
+        assert format_bytes(1024 ** 2 - 1000) == "0.999 MiB"
         assert format_bytes(1024 ** 2) == "1.000 MiB"
         assert format_bytes(1024 ** 3) == "1.000 GiB"
         assert format_bytes(1024 ** 4) == "1.000 TiB"
@@ -20,6 +21,8 @@
         assert format_bytes(1024 ** 7) == "1.000 ZiB"
         assert format_bytes(1024 ** 8) == "1.000 YiB"
         assert format_bytes(1024 ** 9) == "1024.000 YiB"
+        assert format_bytes(1024 ** 10) == "1048576.000 YiB"
+        assert format_bytes(1024 ** 20).endswith("YiB")

     @pytest.mark.asyncio
     async def test_format_minutes(self):