catch up with mypy 1.1.1 (#14767)
This commit is contained in:
parent 8430b072e2
commit a42bf49050
@@ -1221,7 +1221,7 @@ def validate_recent_blocks(
     ses_blocks, sub_slots, transaction_blocks = 0, 0, 0
     challenge, prev_challenge = recent_chain.recent_chain_data[0].reward_chain_block.pos_ss_cc_challenge_hash, None
     tip_height = recent_chain.recent_chain_data[-1].height
-    prev_block_record = None
+    prev_block_record: Optional[BlockRecord] = None
     deficit = uint8(0)
     adjusted = False
     for idx, block in enumerate(recent_chain.recent_chain_data):
@@ -1245,10 +1245,10 @@ def validate_recent_blocks(
         if (challenge is not None) and (prev_challenge is not None):
             overflow = is_overflow_block(constants, block.reward_chain_block.signage_point_index)
             if not adjusted:
+                assert prev_block_record is not None
                 prev_block_record = dataclasses.replace(
                     prev_block_record, deficit=deficit % constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK
                 )
-                assert prev_block_record is not None
                 sub_blocks.add_block_record(prev_block_record)
                 adjusted = True
             deficit = get_deficit(constants, deficit, prev_block_record, overflow, len(block.finished_sub_slots))
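
Note on the two hunks above: once prev_block_record is declared as Optional[BlockRecord], mypy needs the None case ruled out before the value reaches dataclasses.replace, so the assert moves ahead of that call (after the replace the value is already known to be a BlockRecord, making the old assert redundant). A minimal sketch of the narrowing pattern, using a hypothetical stand-in for BlockRecord:

    import dataclasses
    from typing import Optional

    @dataclasses.dataclass(frozen=True)
    class BlockRecordStub:  # hypothetical stand-in for the real BlockRecord
        height: int
        deficit: int

    def adjust(record: Optional[BlockRecordStub]) -> BlockRecordStub:
        # Without this assert, mypy flags the dataclasses.replace() call below,
        # because `record` may still be None at that point.
        assert record is not None
        return dataclasses.replace(record, deficit=record.deficit % 16)

    print(adjust(BlockRecordStub(height=1, deficit=20)))  # BlockRecordStub(height=1, deficit=4)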
@@ -23,13 +23,16 @@ from typing import (
 )

 from blspy import G1Element, G2Element, PrivateKey
-from typing_extensions import Literal, get_args, get_origin
+from typing_extensions import TYPE_CHECKING, Literal, get_args, get_origin

 from chia.types.blockchain_format.sized_bytes import bytes32
 from chia.util.byte_types import hexstr_to_bytes
 from chia.util.hash import std_hash
 from chia.util.ints import uint32

+if TYPE_CHECKING:
+    from _typeshed import DataclassInstance
+
 pp = pprint.PrettyPrinter(indent=1, width=120, compact=True)

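
Note on the import hunk: _typeshed is a stub-only package that exists for type checkers and has no runtime module, so it can only be imported behind the TYPE_CHECKING flag (taken here from typing_extensions; typing.TYPE_CHECKING behaves the same). A minimal illustration of the guard:

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Evaluated only by the type checker; at runtime this branch never runs,
        # which matters because `import _typeshed` would raise ModuleNotFoundError.
        from _typeshed import DataclassInstance

    print(TYPE_CHECKING)  # False at runtime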
@@ -114,7 +117,7 @@ class Field:
 StreamableFields = Tuple[Field, ...]


-def create_fields(cls: Type[object]) -> StreamableFields:
+def create_fields(cls: Type[DataclassInstance]) -> StreamableFields:
     hints = get_type_hints(cls)
     fields = []
     for field in dataclasses.fields(cls):
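
Note on create_fields: current typeshed stubs type dataclasses.fields() against the DataclassInstance protocol rather than plain object, so annotating cls as Type[DataclassInstance] is what lets the dataclasses.fields(cls) call inside the function type-check. A small self-contained sketch of the same idea, with a hypothetical helper:

    from __future__ import annotations  # keeps the stub-only name usable in annotations

    import dataclasses
    from typing import TYPE_CHECKING, Tuple, Type

    if TYPE_CHECKING:
        from _typeshed import DataclassInstance

    def field_names(cls: Type[DataclassInstance]) -> Tuple[str, ...]:
        # dataclasses.fields() expects a dataclass type or instance.
        return tuple(field.name for field in dataclasses.fields(cls))

    @dataclasses.dataclass(frozen=True)
    class Point:
        x: int
        y: int

    print(field_names(Point))  # ('x', 'y')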
@@ -559,7 +562,7 @@ def streamable(cls: Type[_T_Streamable]) -> Type[_T_Streamable]:

     cls._streamable_fields = create_fields(cls)

-    return cls
+    return cls  # type: ignore[return-value]


 class Streamable:
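
Note on the return cls change: return-value is the mypy error code for a returned expression that does not match the declared return type; scoping the ignore to that code (rather than a bare # type: ignore) keeps other diagnostics on the line visible and lets --warn-unused-ignores catch it if a future mypy stops reporting the error. Why mypy 1.1.1 objects to this particular return is not visible from the hunk alone, so the example below only illustrates the error code itself:

    def parse_flag(raw: str) -> bool:
        # mypy reports an incompatible return value here under the [return-value] code.
        return raw  # type: ignore[return-value]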
@@ -500,10 +500,10 @@ async def test_farmer_get_harvester_plots_endpoints(
     await wait_for_plot_sync(receiver, last_sync_id)

     for page_size in [1, int(total_count / 2), total_count - 1, total_count, total_count + 1, 100]:
-        request = dataclasses.replace(request, page_size=uint32(page_size))
+        request = dataclasses.replace(request, page_size=uint32(page_size))  # type: ignore[type-var]
         expected_page_count = ceil(total_count / page_size)
         for page in range(expected_page_count):
-            request = dataclasses.replace(request, page=uint32(page))
+            request = dataclasses.replace(request, page=uint32(page))  # type: ignore[type-var]
             await wait_for_synced_receiver(farmer_service._api.farmer, harvester_id)
             page_result = await endpoint(farmer_rpc_client, request)
             offset = page * page_size
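
Note on the paging hunk: dataclasses.replace returns a new frozen instance with the given fields overridden, which is why request is rebuilt on every iteration; the type-var ignores silence the error code mypy 1.1.1 raises when it cannot solve the type variable in replace's stub signature for the inferred type of request (presumably a union of the request dataclasses used in this test, an assumption on my part). The runtime behaviour is just this, shown with a hypothetical request type:

    import dataclasses

    @dataclasses.dataclass(frozen=True)
    class PageRequest:  # hypothetical stand-in for the RPC request dataclasses
        page: int = 0
        page_size: int = 10

    request = PageRequest()
    request = dataclasses.replace(request, page_size=25)  # new instance, one field overridden
    request = dataclasses.replace(request, page=3)        # originals are never mutated
    print(request)  # PageRequest(page=3, page_size=25)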
@@ -233,7 +233,7 @@ async def test_mm_calls_new_block_height() -> None:
         new_block_height_called = True

     # Replace new_block_height with test function
-    mempool_manager.fee_estimator.new_block_height = types.MethodType(  # type: ignore[assignment]
+    mempool_manager.fee_estimator.new_block_height = types.MethodType(  # type: ignore[method-assign]
         test_new_block_height_called, mempool_manager.fee_estimator
     )
     block2 = create_test_block_record(height=uint32(2))
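
Note on the error-code switch: newer mypy releases report assignment to a method under the dedicated method-assign error code instead of the general assignment code, so existing ignores have to be renamed or they show up as unused. The pattern being suppressed looks like this (simplified stand-ins for the fee estimator):

    import types

    class FeeEstimatorStub:
        def new_block_height(self, height: int) -> None:
            print("real implementation", height)

    def fake_new_block_height(self: FeeEstimatorStub, height: int) -> None:
        print("test implementation", height)

    estimator = FeeEstimatorStub()
    # Rebinding a method on an instance is what mypy now flags as [method-assign].
    estimator.new_block_height = types.MethodType(  # type: ignore[method-assign]
        fake_new_block_height, estimator
    )
    estimator.new_block_height(2)  # test implementation 2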
@@ -586,7 +586,7 @@ async def test_sync_start_and_disconnect_while_sync_is_active(
     # Replace the `Receiver._process` with `disconnecting_process` which triggers a plot manager refresh and disconnects
     # the farmer from the harvester during an active sync.
     original_process = receiver._process
-    receiver._process = functools.partial(disconnecting_process, receiver)  # type: ignore[assignment]
+    receiver._process = functools.partial(disconnecting_process, receiver)  # type: ignore[method-assign]
     # Trigger the refresh which leads to a new sync_start being triggered during the active sync.
     harvester.plot_manager.trigger_refresh()
     await time_out_assert(20, harvester.plot_sync_sender.sync_active)
@@ -253,7 +253,7 @@ async def create_test_runner(
     receiver.simulate_error = 0  # type: ignore[attr-defined]
     receiver.message_counter = 0  # type: ignore[attr-defined]
     receiver.original_process = receiver._process  # type: ignore[attr-defined]
-    receiver._process = functools.partial(_testable_process, receiver)  # type: ignore[assignment]
+    receiver._process = functools.partial(_testable_process, receiver)  # type: ignore[method-assign]
     return TestRunner(harvesters, farmer, event_loop)

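
The two plot-sync hunks above suppress the same method-assign code for a different pattern: a free test function is pre-bound to the receiver with functools.partial and installed in place of the bound method _process, while the original is kept around (as a local in one test, as an instance attribute in the other), presumably so the replacement can delegate to it or the test can restore it. A condensed, hypothetical sketch of that rebinding trick:

    import functools

    class ReceiverStub:
        def _process(self, message: str) -> str:
            return f"processed {message}"

    def _testable_process(self: ReceiverStub, message: str) -> str:
        # Stand-in installed by the test; delegates to the saved original method.
        return "intercepted " + self.original_process(message)

    receiver = ReceiverStub()
    receiver.original_process = receiver._process  # type: ignore[attr-defined]
    receiver._process = functools.partial(_testable_process, receiver)  # type: ignore[method-assign]
    print(receiver._process("hello"))  # intercepted processed hello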