chia-blockchain/chia/server/server.py

708 lines
31 KiB
Python
Raw Normal View History

from __future__ import annotations
2019-08-22 07:19:24 +03:00
import asyncio
2019-11-18 07:49:39 +03:00
import logging
2020-11-05 09:34:34 +03:00
import ssl
import time
import traceback
from dataclasses import dataclass, field
from ipaddress import IPv4Network, IPv6Network, ip_network
2020-10-09 01:50:39 +03:00
from pathlib import Path
from typing import Any, Awaitable, Callable, Dict, List, Optional, Tuple, Union, cast
from aiohttp import (
ClientResponseError,
ClientSession,
ClientTimeout,
ServerDisconnectedError,
WSCloseCode,
client_exceptions,
web,
)
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, serialization
from typing_extensions import final
2020-10-28 20:45:10 +03:00
from chia.protocols.protocol_message_types import ProtocolMessageTypes
from chia.protocols.protocol_state_machine import message_requires_reply
from chia.protocols.protocol_timing import INVALID_PROTOCOL_BAN_SECONDS
from chia.protocols.shared_protocol import protocol_version
from chia.server.api_protocol import ApiProtocol
from chia.server.introducer_peers import IntroducerPeers
from chia.server.outbound_message import Message, NodeType
from chia.server.ssl_context import private_ssl_paths, public_ssl_paths
from chia.server.ws_connection import ConnectionCallback, WSChiaConnection
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.peer_info import PeerInfo
from chia.util.errors import Err, ProtocolError
from chia.util.ints import uint16
2023-02-20 19:23:39 +03:00
from chia.util.network import WebServer, is_in_network, is_localhost, is_trusted_peer
from chia.util.ssl_check import verify_ssl_certs_and_keys
from chia.util.streamable import Streamable
max_message_size = 50 * 1024 * 1024 # 50MB
2020-05-20 21:36:53 +03:00
2020-11-05 09:34:34 +03:00
def ssl_context_for_server(
    ca_cert: Path,
    ca_key: Path,
    cert_path: Path,
    key_path: Path,
    *,
    check_permissions: bool = True,
    log: Optional[logging.Logger] = None,
) -> ssl.SSLContext:
    """Create a TLS server context that requires peers to present a certificate signed by our CA.

    Args:
        ca_cert: CA certificate file used to verify connecting peers.
        ca_key: CA private key file (only used for the permission check).
        cert_path: Certificate this node presents to peers.
        key_path: Private key matching ``cert_path``.
        check_permissions: When True, validate cert/key files and their permissions first.
        log: Optional logger passed to the permission check.

    Returns:
        A configured ``ssl.SSLContext`` for server-side (inbound) connections.
    """
    if check_permissions:
        verify_ssl_certs_and_keys([ca_cert, cert_path], [ca_key, key_path], log)

    # Use the public factory instead of the private ssl._create_unverified_context().
    # For Purpose.CLIENT_AUTH this yields a server-side context with hostname checking
    # disabled and verify_mode CERT_NONE, trusting only the provided CA file.
    ssl_context = ssl.create_default_context(purpose=ssl.Purpose.CLIENT_AUTH, cafile=str(ca_cert))
    ssl_context.check_hostname = False
    ssl_context.minimum_version = ssl.TLSVersion.TLSv1_2
    # Restrict to modern ECDHE suites (forward secrecy, AEAD / SHA-2 only).
    ssl_context.set_ciphers(
        "ECDHE-ECDSA-AES256-GCM-SHA384:"
        "ECDHE-RSA-AES256-GCM-SHA384:"
        "ECDHE-ECDSA-CHACHA20-POLY1305:"
        "ECDHE-RSA-CHACHA20-POLY1305:"
        "ECDHE-ECDSA-AES128-GCM-SHA256:"
        "ECDHE-RSA-AES128-GCM-SHA256:"
        "ECDHE-ECDSA-AES256-SHA384:"
        "ECDHE-RSA-AES256-SHA384:"
        "ECDHE-ECDSA-AES128-SHA256:"
        "ECDHE-RSA-AES128-SHA256"
    )
    ssl_context.load_cert_chain(certfile=str(cert_path), keyfile=str(key_path))
    # Mutual TLS: every inbound peer must present a certificate signed by ca_cert.
    ssl_context.verify_mode = ssl.CERT_REQUIRED
    return ssl_context
def ssl_context_for_root(
    ca_cert_file: str, *, check_permissions: bool = True, log: Optional[logging.Logger] = None
) -> ssl.SSLContext:
    """Build a client-side context that trusts only the given root CA certificate file.

    When ``check_permissions`` is True the CA file is validated (existence, safe
    file permissions) before the context is created.
    """
    if check_permissions:
        verify_ssl_certs_and_keys([Path(ca_cert_file)], [], log)
    return ssl.create_default_context(purpose=ssl.Purpose.SERVER_AUTH, cafile=ca_cert_file)
def ssl_context_for_client(
    ca_cert: Path,
    ca_key: Path,
    cert_path: Path,
    key_path: Path,
    *,
    check_permissions: bool = True,
    log: Optional[logging.Logger] = None,
) -> ssl.SSLContext:
    """Create a TLS client context for mutually-authenticated outbound connections.

    Args:
        ca_cert: CA certificate file used to verify the server we connect to.
        ca_key: CA private key file (only used for the permission check).
        cert_path: Certificate this node presents to the server.
        key_path: Private key matching ``cert_path``.
        check_permissions: When True, validate cert/key files and their permissions first.
        log: Optional logger passed to the permission check.

    Returns:
        A configured ``ssl.SSLContext`` for client-side (outbound) connections.
    """
    if check_permissions:
        verify_ssl_certs_and_keys([ca_cert, cert_path], [ca_key, key_path], log)

    # Use the public factory instead of the private ssl._create_unverified_context();
    # it trusts only the provided CA file (no system CA store).
    ssl_context = ssl.create_default_context(purpose=ssl.Purpose.SERVER_AUTH, cafile=str(ca_cert))
    # Peers are identified by certificate fingerprint (node id), not by hostname,
    # so hostname verification is intentionally disabled.
    ssl_context.check_hostname = False
    ssl_context.load_cert_chain(certfile=str(cert_path), keyfile=str(key_path))
    # Still require the server's certificate to be signed by ca_cert.
    ssl_context.verify_mode = ssl.CERT_REQUIRED
    return ssl_context
def calculate_node_id(cert_path: Path) -> bytes32:
    """Derive the node id: the SHA-256 fingerprint of the certificate at ``cert_path``."""
    certificate = x509.load_pem_x509_certificate(cert_path.read_bytes(), default_backend())
    # Round-trip through DER so the fingerprint is taken over the canonical encoding.
    der_bytes = certificate.public_bytes(encoding=serialization.Encoding.DER)
    reloaded = x509.load_der_x509_certificate(der_bytes, default_backend())
    return bytes32(reloaded.fingerprint(hashes.SHA256()))
@final
@dataclass
class ChiaServer:
    """Websocket server/client hub for a single chia service.

    Owns every peer connection for the local node: it accepts inbound websocket
    connections, initiates outbound ones, enforces bans and inbound limits, and
    garbage-collects idle peers. Construct via ``ChiaServer.create``.
    """

    # --- static configuration, set once by create() ---
    _port: Optional[int]  # listening port; None for client-only services
    _local_type: NodeType  # what kind of service this node is (FULL_NODE, WALLET, ...)
    _local_capabilities_for_handshake: List[Tuple[uint16, str]]  # capabilities advertised during handshake
    _ping_interval: int
    _network_id: str  # mainnet/testnet id; peers on another network are rejected
    _inbound_rate_limit_percent: int
    _outbound_rate_limit_percent: int
    api: ApiProtocol  # message handlers for this service
    node: Any  # the service object itself (full node, wallet, crawler, ...)
    root_path: Path
    config: Dict[str, Any]
    log: logging.Logger
    ssl_context: ssl.SSLContext  # server-side (inbound) TLS context
    ssl_client_context: ssl.SSLContext  # client-side (outbound) TLS context
    node_id: bytes32  # SHA-256 fingerprint of our own certificate
    exempt_peer_networks: List[Union[IPv4Network, IPv6Network]]  # networks exempt from inbound limits
    # --- mutable runtime state ---
    all_connections: Dict[bytes32, WSChiaConnection] = field(default_factory=dict)  # keyed by peer node id
    on_connect: Optional[ConnectionCallback] = None  # invoked for each newly added connection
    shut_down_event: asyncio.Event = field(default_factory=asyncio.Event)
    introducer_peers: Optional[IntroducerPeers] = None  # only set when running as an introducer
    gc_task: Optional[asyncio.Task[None]] = None  # garbage_collect_connections_task handle
    webserver: Optional[WebServer] = None
    connection_close_task: Optional[asyncio.Task[None]] = None
    received_message_callback: Optional[ConnectionCallback] = None
    banned_peers: Dict[str, float] = field(default_factory=dict)  # peer ip -> ban expiry (unix time)
    # NOTE: deliberately unannotated so the dataclass does not treat it as a field.
    invalid_protocol_ban_seconds = INVALID_PROTOCOL_BAN_SECONDS
@classmethod
def create(
cls,
port: Optional[int],
node: Any,
api: ApiProtocol,
local_type: NodeType,
ping_interval: int,
network_id: str,
inbound_rate_limit_percent: int,
outbound_rate_limit_percent: int,
Request header blocks, and new rate limits (#11636) * new blob block api method integrated into wallet * direct msg streaming of headers, rename, tests * perform_handshake call fix * updated trusted sync with new block header calls * add max blocks limit to fetch * added tests for rejected block header msgs * avoid parsing transactions info if not required * avoid looking up capabilities setting * move block tests out of a class * test fix * Merge changes * added docs and increased rate limits * increased block header request interval from 32 to 128 * remove fetching hashes and use height range * fetching by height in db v2 * update capabilities, other fixes * fixed range block header call * Add type hints * Start work on optimizing fetch_last_tx_from_peer * Huge speedup in trusted wallet sync * Revert unintentional changes * Fix trade issue * Improve the code * Str format * Optimize handling of farming rewards * Fix bug * Performance fixes * Optimizations to wallet syncing * Don't return all coins in respond_additions * Revert concurrency numbers * More optimization of the caches * Small optimization in coin_added * Optimize request_additions significantly by using a cache * fixes from feedback * capabilities check fixes * Increase rate limits to allow 250tps in verification requests * Start work on rate limits * New rate limit versioning support * Revert unrelated changes * revert return False * Lint * Revert cbi * try tests with trusted peer * Revert unrelated wallet changes * Revert more debug changes * Add test and throw on an error if not found * Reject invalid requests * Revert bad change with uint32, and change warning to info * Parametrize wallet sync test * Merge and LGTM * More clean way to choose peers * Fix lint * add the new RejectBlockHeaders, RequestBlockHeaders and RespondBlockHeaders to the network protocol regression test and regenerate test files * Rate limit diffs only * Improve performance * Simpler * Lint Co-authored-by: Sebastjan 
<trepca@gmail.com> Co-authored-by: arvidn <arvid@libtorrent.org>
2022-06-11 09:35:41 +03:00
capabilities: List[Tuple[uint16, str]],
2020-11-11 07:14:06 +03:00
root_path: Path,
config: Dict[str, Any],
private_ca_crt_key: Tuple[Path, Path],
chia_ca_crt_key: Tuple[Path, Path],
name: str = __name__,
) -> ChiaServer:
log = logging.getLogger(name)
log.info("Service capabilities: %s", capabilities)
2020-10-16 04:03:46 +03:00
ca_private_crt_path, ca_private_key_path = private_ca_crt_key
chia_ca_crt_path, chia_ca_key_path = chia_ca_crt_key
private_cert_path, private_key_path = None, None
public_cert_path, public_key_path = None, None
authenticated_client_types = {NodeType.HARVESTER}
authenticated_server_types = {NodeType.HARVESTER, NodeType.FARMER, NodeType.WALLET, NodeType.DATA_LAYER}
if local_type in authenticated_client_types:
# Authenticated clients
private_cert_path, private_key_path = private_ssl_paths(root_path, config)
ssl_client_context = ssl_context_for_client(
ca_cert=ca_private_crt_path,
ca_key=ca_private_key_path,
2023-03-30 04:18:51 +03:00
cert_path=private_cert_path,
key_path=private_key_path,
)
else:
# Public clients
public_cert_path, public_key_path = public_ssl_paths(root_path, config)
ssl_client_context = ssl_context_for_client(
ca_cert=chia_ca_crt_path,
ca_key=chia_ca_key_path,
2023-03-30 04:18:51 +03:00
cert_path=public_cert_path,
key_path=public_key_path,
)
if local_type in authenticated_server_types:
# Authenticated servers
private_cert_path, private_key_path = private_ssl_paths(root_path, config)
ssl_context = ssl_context_for_server(
ca_cert=ca_private_crt_path,
ca_key=ca_private_key_path,
cert_path=private_cert_path,
key_path=private_key_path,
log=log,
)
else:
# Public servers
public_cert_path, public_key_path = public_ssl_paths(root_path, config)
ssl_context = ssl_context_for_server(
ca_cert=chia_ca_crt_path,
ca_key=chia_ca_key_path,
cert_path=public_cert_path,
key_path=public_key_path,
log=log,
)
node_id_cert_path = private_cert_path if public_cert_path is None else public_cert_path
assert node_id_cert_path is not None
return cls(
_port=port,
_local_type=local_type,
_local_capabilities_for_handshake=capabilities,
_ping_interval=ping_interval,
_network_id=network_id,
_inbound_rate_limit_percent=inbound_rate_limit_percent,
_outbound_rate_limit_percent=outbound_rate_limit_percent,
log=log,
api=api,
node=node,
root_path=root_path,
config=config,
ssl_context=ssl_context,
ssl_client_context=ssl_client_context,
node_id=calculate_node_id(node_id_cert_path),
exempt_peer_networks=[ip_network(net, strict=False) for net in config.get("exempt_peer_networks", [])],
introducer_peers=IntroducerPeers() if local_type is NodeType.INTRODUCER else None,
)
def set_received_message_callback(self, callback: ConnectionCallback) -> None:
self.received_message_callback = callback
2020-11-30 11:54:18 +03:00
async def garbage_collect_connections_task(self) -> None:
"""
Periodically checks for connections with no activity (have not sent us any data), and removes them,
to allow room for other peers.
"""
Chia Seeder (#8991) * initial hack * crawler * add pytz * Checkpoint. * Catch some bugs. * Localhost dig working. * Checkpoint: return only high quality nodes. * Statistics. * Try improving finding reliable nodes. * Bug. * Move db to memory. * Timestamp in the last 5 days. * Increase crawl parameters, 180+ connections per sec. * Bug. * Optimize for DNS traffic. * Prepare for hosting. * Minimum height. * Typo. * Try catch everything. * dnslib. * Add db, format code. * nits. * No connections for the dns server. * Rename src -> chia * Fix some issues with v1.1 * Crawler task pool. * Optimize closing connections. * Split crawler and dns server. * Install instructions. * Catch startup bug. * Try a big timeout for lock aquire. * lint. * Lint. * Initial commit extended stats. * Simplify code. * Config. * Correct stats. * Be more restrictive in crawling. * Attempt to fix stats bug. * Add other peers port to config. * Update README for the config. * Simplify crawl task. * Fix bug on restarts. * Prevent log spamming. * More spam prevention. * Fix bug. * Ipv6 (#1) * Enable ipv6. * Fix bug. * Use numeric codes for QTYPE. * ANY working. * More spam prevention. * Try to improve IPv6 selection. * Log IPv6 available. * Try to crawl more aggresive for v6. 
* rename dns.py to crawler_dns.py so it doesn't conflict with imported package names * Remove pytz package off dependencies * Tidy-up ws_connection.py * Fix spelling * Reinstate chia-blockchain readme, with additional lines pertaining to the DNS introducer & crawler * More detailed info in the README wrt Chia Seeder * Nit * More memetic naming of Chia Seeder * Nit * Add entry points * Add entry in packages * Patch some methods on the upstream server * Update peer record fields * Standard library imports first * Crawler API check * Reconcile crawl store * Account for crawler_db_path in config * Await crawl store load DB and load reliable peers * Updates to crawler * Rename to dns_server * Crawler-specific overrides for the chia server * Edit comment * Undo changes to ChiaServer in view of crawler-specific overrides introduced in previous commit * Nit * Update service groups * Expand name maps, mostly * Fix the init config * Remove unused import * total_records unused at this stage * Remove ios_reliable in peer_reliability table * Remove row[20] entry * Split overly long line * Fix * Type hint for ns_records * Reconcile mismatch btw type int and uint64 * Type annotations in crawler * Check whether crawl store is set * Remove upnp_list * Lint * Chia Seeder CLI * Lint * Two white spaces * 3rd party package import * Cleaner way to handle overrides for ChiaServer method * Address linter warnings * Rename * Nits * Fix * Change port # * Most chia_seeder commands up and running * Rename * Progress of sorts * Fix * Improve legibility * Fix naming * Fix setup.py * Lint * None -> '' * Remove whitespace * Rename * Log ipv6 better. (#9227) * Log ipv6 better. * Lint. * - * Undo GUI changes * Another attempt * GUI changes Co-authored-by: Yostra <straya@chia.net> Co-authored-by: Florin Chirica <fchirica96@gmail.com> Co-authored-by: Chris Marslender <chrismarslender@gmail.com>
2021-11-28 05:30:25 +03:00
is_crawler = getattr(self.node, "crawl", None)
while True:
Chia Seeder (#8991) * initial hack * crawler * add pytz * Checkpoint. * Catch some bugs. * Localhost dig working. * Checkpoint: return only high quality nodes. * Statistics. * Try improving finding reliable nodes. * Bug. * Move db to memory. * Timestamp in the last 5 days. * Increase crawl parameters, 180+ connections per sec. * Bug. * Optimize for DNS traffic. * Prepare for hosting. * Minimum height. * Typo. * Try catch everything. * dnslib. * Add db, format code. * nits. * No connections for the dns server. * Rename src -> chia * Fix some issues with v1.1 * Crawler task pool. * Optimize closing connections. * Split crawler and dns server. * Install instructions. * Catch startup bug. * Try a big timeout for lock aquire. * lint. * Lint. * Initial commit extended stats. * Simplify code. * Config. * Correct stats. * Be more restrictive in crawling. * Attempt to fix stats bug. * Add other peers port to config. * Update README for the config. * Simplify crawl task. * Fix bug on restarts. * Prevent log spamming. * More spam prevention. * Fix bug. * Ipv6 (#1) * Enable ipv6. * Fix bug. * Use numeric codes for QTYPE. * ANY working. * More spam prevention. * Try to improve IPv6 selection. * Log IPv6 available. * Try to crawl more aggresive for v6. 
* rename dns.py to crawler_dns.py so it doesn't conflict with imported package names * Remove pytz package off dependencies * Tidy-up ws_connection.py * Fix spelling * Reinstate chia-blockchain readme, with additional lines pertaining to the DNS introducer & crawler * More detailed info in the README wrt Chia Seeder * Nit * More memetic naming of Chia Seeder * Nit * Add entry points * Add entry in packages * Patch some methods on the upstream server * Update peer record fields * Standard library imports first * Crawler API check * Reconcile crawl store * Account for crawler_db_path in config * Await crawl store load DB and load reliable peers * Updates to crawler * Rename to dns_server * Crawler-specific overrides for the chia server * Edit comment * Undo changes to ChiaServer in view of crawler-specific overrides introduced in previous commit * Nit * Update service groups * Expand name maps, mostly * Fix the init config * Remove unused import * total_records unused at this stage * Remove ios_reliable in peer_reliability table * Remove row[20] entry * Split overly long line * Fix * Type hint for ns_records * Reconcile mismatch btw type int and uint64 * Type annotations in crawler * Check whether crawl store is set * Remove upnp_list * Lint * Chia Seeder CLI * Lint * Two white spaces * 3rd party package import * Cleaner way to handle overrides for ChiaServer method * Address linter warnings * Rename * Nits * Fix * Change port # * Most chia_seeder commands up and running * Rename * Progress of sorts * Fix * Improve legibility * Fix naming * Fix setup.py * Lint * None -> '' * Remove whitespace * Rename * Log ipv6 better. (#9227) * Log ipv6 better. * Lint. * - * Undo GUI changes * Another attempt * GUI changes Co-authored-by: Yostra <straya@chia.net> Co-authored-by: Florin Chirica <fchirica96@gmail.com> Co-authored-by: Chris Marslender <chrismarslender@gmail.com>
2021-11-28 05:30:25 +03:00
await asyncio.sleep(600 if is_crawler is None else 2)
to_remove: List[WSChiaConnection] = []
for connection in self.all_connections.values():
if connection.closed:
to_remove.append(connection)
elif (
2022-02-27 02:31:10 +03:00
self._local_type == NodeType.FULL_NODE or self._local_type == NodeType.WALLET
) and connection.connection_type == NodeType.FULL_NODE:
Chia Seeder (#8991) * initial hack * crawler * add pytz * Checkpoint. * Catch some bugs. * Localhost dig working. * Checkpoint: return only high quality nodes. * Statistics. * Try improving finding reliable nodes. * Bug. * Move db to memory. * Timestamp in the last 5 days. * Increase crawl parameters, 180+ connections per sec. * Bug. * Optimize for DNS traffic. * Prepare for hosting. * Minimum height. * Typo. * Try catch everything. * dnslib. * Add db, format code. * nits. * No connections for the dns server. * Rename src -> chia * Fix some issues with v1.1 * Crawler task pool. * Optimize closing connections. * Split crawler and dns server. * Install instructions. * Catch startup bug. * Try a big timeout for lock aquire. * lint. * Lint. * Initial commit extended stats. * Simplify code. * Config. * Correct stats. * Be more restrictive in crawling. * Attempt to fix stats bug. * Add other peers port to config. * Update README for the config. * Simplify crawl task. * Fix bug on restarts. * Prevent log spamming. * More spam prevention. * Fix bug. * Ipv6 (#1) * Enable ipv6. * Fix bug. * Use numeric codes for QTYPE. * ANY working. * More spam prevention. * Try to improve IPv6 selection. * Log IPv6 available. * Try to crawl more aggresive for v6. 
* rename dns.py to crawler_dns.py so it doesn't conflict with imported package names * Remove pytz package off dependencies * Tidy-up ws_connection.py * Fix spelling * Reinstate chia-blockchain readme, with additional lines pertaining to the DNS introducer & crawler * More detailed info in the README wrt Chia Seeder * Nit * More memetic naming of Chia Seeder * Nit * Add entry points * Add entry in packages * Patch some methods on the upstream server * Update peer record fields * Standard library imports first * Crawler API check * Reconcile crawl store * Account for crawler_db_path in config * Await crawl store load DB and load reliable peers * Updates to crawler * Rename to dns_server * Crawler-specific overrides for the chia server * Edit comment * Undo changes to ChiaServer in view of crawler-specific overrides introduced in previous commit * Nit * Update service groups * Expand name maps, mostly * Fix the init config * Remove unused import * total_records unused at this stage * Remove ios_reliable in peer_reliability table * Remove row[20] entry * Split overly long line * Fix * Type hint for ns_records * Reconcile mismatch btw type int and uint64 * Type annotations in crawler * Check whether crawl store is set * Remove upnp_list * Lint * Chia Seeder CLI * Lint * Two white spaces * 3rd party package import * Cleaner way to handle overrides for ChiaServer method * Address linter warnings * Rename * Nits * Fix * Change port # * Most chia_seeder commands up and running * Rename * Progress of sorts * Fix * Improve legibility * Fix naming * Fix setup.py * Lint * None -> '' * Remove whitespace * Rename * Log ipv6 better. (#9227) * Log ipv6 better. * Lint. * - * Undo GUI changes * Another attempt * GUI changes Co-authored-by: Yostra <straya@chia.net> Co-authored-by: Florin Chirica <fchirica96@gmail.com> Co-authored-by: Chris Marslender <chrismarslender@gmail.com>
2021-11-28 05:30:25 +03:00
if is_crawler is not None:
if time.time() - connection.creation_time > 5:
to_remove.append(connection)
else:
if time.time() - connection.last_message_time > 1800:
to_remove.append(connection)
for connection in to_remove:
self.log.debug(f"Garbage collecting connection {connection.peer_info.host} due to inactivity")
if connection.closed:
self.all_connections.pop(connection.peer_node_id)
else:
await connection.close()
2021-03-03 21:27:00 +03:00
# Also garbage collect banned_peers dict
to_remove_ban = []
for peer_ip, ban_until_time in self.banned_peers.items():
if time.time() > ban_until_time:
to_remove_ban.append(peer_ip)
for peer_ip in to_remove_ban:
del self.banned_peers[peer_ip]
async def start(
self,
prefer_ipv6: bool,
on_connect: Optional[ConnectionCallback] = None,
) -> None:
if self.webserver is not None:
raise RuntimeError("ChiaServer already started")
if self.gc_task is None:
self.gc_task = asyncio.create_task(self.garbage_collect_connections_task())
if self._port is not None:
self.on_connect = on_connect
self.webserver = await WebServer.create(
hostname="",
port=self.get_port(),
routes=[web.get("/ws", self.incoming_connection)],
ssl_context=self.ssl_context,
prefer_ipv6=prefer_ipv6,
logger=self.log,
)
self._port = int(self.webserver.listen_port)
self.log.info(f"Started listening on port: {self._port}")
2020-10-16 04:03:46 +03:00
    async def incoming_connection(self, request: web.Request) -> web.StreamResponse:
        """Accept an inbound websocket connection, perform the protocol handshake, and register the peer.

        Rejects crawlers (which must not accept inbound connections), banned IPs,
        self-connections, and peers over the configured inbound limit. On protocol
        errors the peer may be banned for ``invalid_protocol_ban_seconds``.
        """
        if getattr(self.node, "crawl", None) is not None:
            raise web.HTTPForbidden(reason="incoming connections not allowed for crawler")
        if request.remote is None:
            raise web.HTTPInternalServerError(reason=f"remote is None for request {request}")
        # Refuse peers whose ban has not yet expired.
        if request.remote in self.banned_peers and time.time() < self.banned_peers[request.remote]:
            reason = f"Peer {request.remote} is banned, refusing connection"
            self.log.warning(reason)
            raise web.HTTPForbidden(reason=reason)
        ws = web.WebSocketResponse(max_msg_size=max_message_size)
        await ws.prepare(request)
        # Mutual TLS guarantees a peer certificate is present; its SHA-256
        # fingerprint is the peer's node id.
        ssl_object = request.get_extra_info("ssl_object")
        if ssl_object is None:
            reason = f"ssl_object is None for request {request}"
            self.log.warning(reason)
            raise web.HTTPInternalServerError(reason=reason)
        cert_bytes = ssl_object.getpeercert(True)
        der_cert = x509.load_der_x509_certificate(cert_bytes)
        peer_id = bytes32(der_cert.fingerprint(hashes.SHA256()))
        if peer_id == self.node_id:
            # Same certificate as ours: we connected to ourselves, drop silently.
            return ws

        connection: Optional[WSChiaConnection] = None
        try:
            connection = WSChiaConnection.create(
                local_type=self._local_type,
                ws=ws,
                api=self.api,
                server_port=self.get_port(),
                log=self.log,
                is_outbound=False,
                received_message_callback=self.received_message_callback,
                close_callback=self.connection_closed,
                peer_id=peer_id,
                inbound_rate_limit_percent=self._inbound_rate_limit_percent,
                outbound_rate_limit_percent=self._outbound_rate_limit_percent,
                local_capabilities_for_handshake=self._local_capabilities_for_handshake,
            )
            await connection.perform_handshake(self._network_id, protocol_version, self.get_port(), self._local_type)
            assert connection.connection_type is not None, "handshake failed to set connection type, still None"

            # Limit inbound connections to config's specifications.
            if not self.accept_inbound_connections(connection.connection_type) and not is_in_network(
                connection.peer_info.host, self.exempt_peer_networks
            ):
                self.log.info(
                    f"Not accepting inbound connection: {connection.get_peer_logging()}.Inbound limit reached."
                )
                await connection.close()
            else:
                await self.connection_added(connection, self.on_connect)
                # Introducers track full nodes so they can hand them out to new peers.
                if self.introducer_peers is not None and connection.connection_type is NodeType.FULL_NODE:
                    self.introducer_peers.add(connection.get_peer_info())
        except ProtocolError as e:
            # Protocol violations earn a temporary ban via the close callback.
            if connection is not None:
                await connection.close(self.invalid_protocol_ban_seconds, WSCloseCode.PROTOCOL_ERROR, e.code)
            if e.code == Err.INVALID_HANDSHAKE:
                self.log.warning("Invalid handshake with peer. Maybe the peer is running old software.")
            elif e.code == Err.INCOMPATIBLE_NETWORK_ID:
                self.log.warning("Incompatible network ID. Maybe the peer is on another network")
            else:
                error_stack = traceback.format_exc()
                self.log.error(f"Exception {e}, exception Stack: {error_stack}")
        except ValueError as e:
            # e.g. malformed messages during the handshake; treated like a protocol error.
            if connection is not None:
                await connection.close(self.invalid_protocol_ban_seconds, WSCloseCode.PROTOCOL_ERROR, Err.UNKNOWN)
            self.log.warning(f"{e} - closing connection")
        except Exception as e:
            # Unexpected failure: close the connection but do not ban the peer.
            if connection is not None:
                await connection.close(ws_close_code=WSCloseCode.PROTOCOL_ERROR, error=Err.UNKNOWN)
            error_stack = traceback.format_exc()
            self.log.error(f"Exception {e}, exception Stack: {error_stack}")
        if connection is not None:
            # Keep the aiohttp handler alive until the websocket is fully closed.
            await connection.wait_until_closed()
        return ws
async def connection_added(
self, connection: WSChiaConnection, on_connect: Optional[ConnectionCallback] = None
) -> None:
2021-03-03 21:27:00 +03:00
# If we already had a connection to this peer_id, close the old one. This is secure because peer_ids are based
# on TLS public keys
if connection.closed:
self.log.debug(f"ignoring unexpected request to add closed connection {connection.peer_info.host} ")
return
if connection.peer_node_id in self.all_connections:
con = self.all_connections[connection.peer_node_id]
await con.close()
2020-10-30 04:45:47 +03:00
self.all_connections[connection.peer_node_id] = connection
2020-12-10 19:34:56 +03:00
if connection.connection_type is not None:
if on_connect is not None:
await on_connect(connection)
else:
self.log.error(f"Invalid connection type for connection {connection}")
2020-10-30 04:45:47 +03:00
def is_duplicate_or_self_connection(self, target_node: PeerInfo) -> bool:
if is_localhost(target_node.host) and target_node.port == self._port:
# Don't connect to self
self.log.debug(f"Not connecting to {target_node}")
return True
for connection in self.all_connections.values():
if connection.peer_info.host == target_node.host and connection.peer_server_port == target_node.port:
self.log.debug(f"Not connecting to {target_node}, duplicate connection")
return True
return False
2019-11-18 07:50:31 +03:00
    async def start_client(
        self,
        target_node: PeerInfo,
        on_connect: Optional[ConnectionCallback] = None,
        is_feeler: bool = False,
    ) -> bool:
        """
        Tries to connect to the target node, adding one connection into the pipeline, if successful.
        An on connect method can also be specified, and this will be saved into the instance variables.

        Returns True only when the websocket was established, the TLS-derived peer id was accepted,
        and the protocol handshake completed; every failure path returns False.
        """
        # Refuse duplicates / self-connections, and peers whose ban has not yet expired.
        if self.is_duplicate_or_self_connection(target_node):
            self.log.warning(f"cannot connect to {target_node.host}, duplicate/self connection")
            return False
        if target_node.host in self.banned_peers and time.time() < self.banned_peers[target_node.host]:
            self.log.warning(f"Peer {target_node.host} is still banned, not connecting to it")
            return False
        session = None
        connection: Optional[WSChiaConnection] = None
        try:
            # Crawler/DNS introducer usually uses a lower timeout than the default
            timeout_value = float(self.config.get("peer_connect_timeout", 30))
            timeout = ClientTimeout(total=timeout_value)
            session = ClientSession(timeout=timeout)
            # IPv6 literals must be bracketed in URLs.
            ip = f"[{target_node.ip}]" if target_node.ip.is_v6 else f"{target_node.ip}"
            url = f"wss://{ip}:{target_node.port}/ws"
            self.log.debug(f"Connecting: {url}, Peer info: {target_node}")
            try:
                ws = await session.ws_connect(
                    url,
                    autoclose=True,
                    autoping=True,
                    heartbeat=60,
                    ssl=self.ssl_client_context,
                    max_msg_size=max_message_size,
                )
            except ServerDisconnectedError:
                self.log.debug(f"Server disconnected error connecting to {url}. Perhaps we are banned by the peer.")
                return False
            except ClientResponseError as e:
                self.log.warning(f"Connection failed to {url}. Error: {e}")
                return False
            except asyncio.TimeoutError:
                self.log.debug(f"Timeout error connecting to {url}")
                return False
            if ws is None:
                self.log.warning(f"Connection failed to {url}. ws was None")
                return False
            # Derive the peer id from the SHA256 fingerprint of the peer's TLS certificate.
            ssl_object = ws.get_extra_info("ssl_object")
            if ssl_object is None:
                raise ValueError(f"ssl_object is None for {ws}")
            cert_bytes = ssl_object.getpeercert(True)
            der_cert = x509.load_der_x509_certificate(cert_bytes, default_backend())
            peer_id = bytes32(der_cert.fingerprint(hashes.SHA256()))
            if peer_id == self.node_id:
                self.log.info(f"Connected to a node with the same peer ID, disconnecting: {target_node} {peer_id}")
                return False
            # Advertise our own listening port in the handshake; 0 when we are not listening.
            server_port: uint16
            try:
                server_port = self.get_port()
            except ValueError:
                server_port = uint16(0)
            connection = WSChiaConnection.create(
                local_type=self._local_type,
                ws=ws,
                api=self.api,
                server_port=server_port,
                log=self.log,
                is_outbound=True,
                received_message_callback=self.received_message_callback,
                close_callback=self.connection_closed,
                peer_id=peer_id,
                inbound_rate_limit_percent=self._inbound_rate_limit_percent,
                outbound_rate_limit_percent=self._outbound_rate_limit_percent,
                local_capabilities_for_handshake=self._local_capabilities_for_handshake,
                session=session,
            )
            await connection.perform_handshake(self._network_id, protocol_version, server_port, self._local_type)
            await self.connection_added(connection, on_connect)
            # the session has been adopted by the connection, don't close it at
            # the end of the function
            session = None
            connection_type_str = ""
            if connection.connection_type is not None:
                connection_type_str = connection.connection_type.name.lower()
            self.log.info(f"Connected with {connection_type_str} {target_node}")
            if is_feeler:
                # NOTE(review): the close task is not retained anywhere; if it is garbage-collected
                # before running, the feeler connection may never be closed — TODO confirm.
                asyncio.create_task(connection.close())
            return True
        except client_exceptions.ClientConnectorError as e:
            self.log.info(f"{e}")
        except ProtocolError as e:
            # Handshake/protocol violations: ban the peer (where applicable) and log by error code.
            if connection is not None:
                await connection.close(self.invalid_protocol_ban_seconds, WSCloseCode.PROTOCOL_ERROR, e.code)
            if e.code == Err.INVALID_HANDSHAKE:
                self.log.warning(f"Invalid handshake with peer {target_node}. Maybe the peer is running old software.")
            elif e.code == Err.INCOMPATIBLE_NETWORK_ID:
                self.log.warning("Incompatible network ID. Maybe the peer is on another network")
            elif e.code == Err.SELF_CONNECTION:
                pass
            else:
                error_stack = traceback.format_exc()
                self.log.error(f"Exception {e}, exception Stack: {error_stack}")
        except Exception as e:
            # Unexpected failure: close defensively with a ban.
            if connection is not None:
                await connection.close(self.invalid_protocol_ban_seconds, WSCloseCode.PROTOCOL_ERROR, Err.UNKNOWN)
            error_stack = traceback.format_exc()
            self.log.error(f"Exception {e}, exception Stack: {error_stack}")
        finally:
            # Only close the session if it was not handed over to the connection above.
            if session is not None:
                await session.close()
        return False
async def connection_closed(
self, connection: WSChiaConnection, ban_time: int, closed_connection: bool = False
) -> None:
# closed_connection is true if the callback is being called with a connection that was previously closed
# in this case we still want to do the banning logic and remove the conection from the list
# but the other cleanup should already have been done so we skip that
if is_localhost(connection.peer_info.host) and ban_time != 0:
self.log.warning(f"Trying to ban localhost for {ban_time}, but will not ban")
ban_time = 0
2021-03-03 21:27:00 +03:00
if ban_time > 0:
ban_until: float = time.time() + ban_time
self.log.warning(f"Banning {connection.peer_info.host} for {ban_time} seconds")
if connection.peer_info.host in self.banned_peers:
if ban_until > self.banned_peers[connection.peer_info.host]:
self.banned_peers[connection.peer_info.host] = ban_until
2021-03-03 21:27:00 +03:00
else:
self.banned_peers[connection.peer_info.host] = ban_until
2021-03-03 21:27:00 +03:00
present_connection = self.all_connections.get(connection.peer_node_id)
if present_connection is connection:
2020-10-28 20:45:10 +03:00
self.all_connections.pop(connection.peer_node_id)
if not closed_connection:
self.log.info(f"Connection closed: {connection.peer_info.host}, node id: {connection.peer_node_id}")
if connection.connection_type is None:
# This means the handshake was never finished with this peer
self.log.debug(
f"Invalid connection type for connection {connection.peer_info.host},"
f" while closing. Handshake never finished."
)
connection.cancel_tasks()
on_disconnect = getattr(self.node, "on_disconnect", None)
if on_disconnect is not None:
await on_disconnect(connection)
2020-10-16 04:03:46 +03:00
async def validate_broadcast_message_type(self, messages: List[Message], node_type: NodeType) -> None:
for message in messages:
if message_requires_reply(ProtocolMessageTypes(message.type)):
# Internal protocol logic error - we will raise, blocking messages to all peers
self.log.error(f"Attempt to broadcast message requiring protocol response: {message.type}")
for _, connection in self.all_connections.items():
if connection.connection_type is node_type:
await connection.close(
ban_time=self.invalid_protocol_ban_seconds,
ws_close_code=WSCloseCode.INTERNAL_ERROR,
error=Err.INTERNAL_PROTOCOL_ERROR,
)
raise ProtocolError(Err.INTERNAL_PROTOCOL_ERROR, [message.type])
async def send_to_all(
self,
messages: List[Message],
node_type: NodeType,
exclude: Optional[bytes32] = None,
) -> None:
await self.validate_broadcast_message_type(messages, node_type)
2020-11-30 11:54:18 +03:00
for _, connection in self.all_connections.items():
2020-12-02 05:53:35 +03:00
if connection.connection_type is node_type and connection.peer_node_id != exclude:
2020-10-28 20:45:10 +03:00
for message in messages:
await connection.send_message(message)
async def send_to_specific(self, messages: List[Message], node_id: bytes32) -> None:
2020-10-28 20:45:10 +03:00
if node_id in self.all_connections:
connection = self.all_connections[node_id]
2020-10-16 04:03:46 +03:00
for message in messages:
await connection.send_message(message)
async def call_api_of_specific(
self, request_method: Callable[..., Awaitable[Optional[Message]]], message_data: Streamable, node_id: bytes32
) -> Optional[Any]:
if node_id in self.all_connections:
connection = self.all_connections[node_id]
return await connection.call_api(request_method, message_data)
return None
def get_connections(
self, node_type: Optional[NodeType] = None, *, outbound: Optional[bool] = None
) -> List[WSChiaConnection]:
2020-10-21 11:19:40 +03:00
result = []
2020-11-30 11:54:18 +03:00
for _, connection in self.all_connections.items():
node_type_match = node_type is None or connection.connection_type == node_type
outbound_match = outbound is None or connection.is_outbound == outbound
if node_type_match and outbound_match:
result.append(connection)
2020-10-21 11:19:40 +03:00
return result
async def close_all_connections(self) -> None:
for connection in self.all_connections.copy().values():
2020-10-21 04:45:09 +03:00
try:
await connection.close()
2020-10-21 04:45:09 +03:00
except Exception as e:
2020-12-02 06:43:30 +03:00
self.log.error(f"Exception while closing connection {e}")
2020-10-21 04:45:09 +03:00
def close_all(self) -> None:
2020-11-30 11:54:18 +03:00
self.connection_close_task = asyncio.create_task(self.close_all_connections())
if self.webserver is not None:
self.webserver.close()
2019-10-30 21:51:27 +03:00
2020-10-16 04:03:46 +03:00
self.shut_down_event.set()
if self.gc_task is not None:
self.gc_task.cancel()
self.gc_task = None
async def await_closed(self) -> None:
self.log.debug("Await Closed")
2020-10-16 04:03:46 +03:00
await self.shut_down_event.wait()
2020-11-30 11:54:18 +03:00
if self.connection_close_task is not None:
await self.connection_close_task
if self.webserver is not None:
await self.webserver.await_closed()
self.webserver = None
2020-12-10 20:38:06 +03:00
2020-10-20 09:40:55 +03:00
async def get_peer_info(self) -> Optional[PeerInfo]:
ip = None
try:
port = self.get_port()
except ValueError:
return None # server doesn't have a local port, just return None here
2020-10-20 09:40:55 +03:00
# Use chia's service first.
2020-10-20 09:40:55 +03:00
try:
timeout = ClientTimeout(total=15)
async with ClientSession(timeout=timeout) as session:
async with session.get("https://ip.chia.net/") as resp:
2020-10-20 09:40:55 +03:00
if resp.status == 200:
ip = str(await resp.text())
ip = ip.rstrip()
except Exception:
ip = None
# Fallback to `checkip` from amazon.
if ip is None:
try:
timeout = ClientTimeout(total=15)
async with ClientSession(timeout=timeout) as session:
async with session.get("https://checkip.amazonaws.com/") as resp:
if resp.status == 200:
ip = str(await resp.text())
ip = ip.rstrip()
except Exception:
ip = None
2020-10-20 09:40:55 +03:00
if ip is None:
return None
try:
return PeerInfo(ip, uint16(port))
except ValueError:
2020-10-20 09:40:55 +03:00
return None
2021-01-21 22:20:50 +03:00
Bind port 0 to fix race condition when grabbing available ports (#11578) * port 0 to fix flakiness * Try fixing setup_full_system * Try fixing setup_full_system, and lint * More attempts to fix * No more calls to get random ports in setup_nodes * Revert accidental changes * Timelord extra arg * Try with port 0 * Fix daemon test, and lint * Try without 0.0.0.0 * Back to 0.0.0.0 * Try a few timelord changes to get test running * Increase timeout again * Use the correct interface to get the port * INFO logging to debug issue * Revert "INFO logging to debug issue" This reverts commit 7c379e5ccafbf7c30f9bd467cb1b5868dc8b3f6e. * Fix advertised port log * Add extra log * Logging back * Rollback the timelord changes * Try port 0 timelord * Revert "Try port 0 timelord" This reverts commit 4997faf3b22c0946cba52b1d1719552d24755355. * Try full green, change ordering * Remove unused var * speed up simulation and cleanup * Now try without the port config * Fix a flaky call to get_event_loop * Try getting the port dynamically * No dynamic port * Try changing the ordering * Try adding a sleep * Back to what works * Timelord before vdf clients * Dynamic port for 1st timelord * Revert "Dynamic port for 1st timelord" This reverts commit 0f322a15b7fbc85d41b2758b2625072ac847f350. * Revert "Timelord before vdf clients" This reverts commit 3286c34696d9d2f12304a88511763ac612bc7928. * Revert "Back to what works" This reverts commit 30380dffb76507a5171e2e2f8bb0ce4108039ad6. * Revert "Try adding a sleep" This reverts commit 9212b665a62a86c4c98d52cbd405ffb76221dd8b. * Revert "Try changing the ordering" This reverts commit a62597d70d1dfffde19ca5d82f73847798304ae1. * Revert "No dynamic port" This reverts commit 5d2e15749b3772b375b66317151044556a0e77fa. * Revert "Try getting the port dynamically" This reverts commit ef9cd75679bd235d6767fe30f8371cbca9b6e244. * Revert "Fix a flaky call to get_event_loop" This reverts commit 01a000fdfbe66353edc5ea432ee84e520b2e53d3. 
* Try one to 0 * Just not 0 * Don't get port dynamically * Cleanup a bit * Fix * Some cleanup work * Some cleanup work * Fix daemon test * Cleanup * Remove arguments
2022-05-23 18:13:49 +03:00
def get_port(self) -> uint16:
if self._port is None:
raise ValueError("Port not set")
Bind port 0 to fix race condition when grabbing available ports (#11578) * port 0 to fix flakiness * Try fixing setup_full_system * Try fixing setup_full_system, and lint * More attempts to fix * No more calls to get random ports in setup_nodes * Revert accidental changes * Timelord extra arg * Try with port 0 * Fix daemon test, and lint * Try without 0.0.0.0 * Back to 0.0.0.0 * Try a few timelord changes to get test running * Increase timeout again * Use the correct interface to get the port * INFO logging to debug issue * Revert "INFO logging to debug issue" This reverts commit 7c379e5ccafbf7c30f9bd467cb1b5868dc8b3f6e. * Fix advertised port log * Add extra log * Logging back * Rollback the timelord changes * Try port 0 timelord * Revert "Try port 0 timelord" This reverts commit 4997faf3b22c0946cba52b1d1719552d24755355. * Try full green, change ordering * Remove unused var * speed up simulation and cleanup * Now try without the port config * Fix a flaky call to get_event_loop * Try getting the port dynamically * No dynamic port * Try changing the ordering * Try adding a sleep * Back to what works * Timelord before vdf clients * Dynamic port for 1st timelord * Revert "Dynamic port for 1st timelord" This reverts commit 0f322a15b7fbc85d41b2758b2625072ac847f350. * Revert "Timelord before vdf clients" This reverts commit 3286c34696d9d2f12304a88511763ac612bc7928. * Revert "Back to what works" This reverts commit 30380dffb76507a5171e2e2f8bb0ce4108039ad6. * Revert "Try adding a sleep" This reverts commit 9212b665a62a86c4c98d52cbd405ffb76221dd8b. * Revert "Try changing the ordering" This reverts commit a62597d70d1dfffde19ca5d82f73847798304ae1. * Revert "No dynamic port" This reverts commit 5d2e15749b3772b375b66317151044556a0e77fa. * Revert "Try getting the port dynamically" This reverts commit ef9cd75679bd235d6767fe30f8371cbca9b6e244. * Revert "Fix a flaky call to get_event_loop" This reverts commit 01a000fdfbe66353edc5ea432ee84e520b2e53d3. 
* Try one to 0 * Just not 0 * Don't get port dynamically * Cleanup a bit * Fix * Some cleanup work * Some cleanup work * Fix daemon test * Cleanup * Remove arguments
2022-05-23 18:13:49 +03:00
return uint16(self._port)
def accept_inbound_connections(self, node_type: NodeType) -> bool:
2021-01-21 22:23:32 +03:00
if not self._local_type == NodeType.FULL_NODE:
2021-01-21 22:20:50 +03:00
return True
inbound_count = len(self.get_connections(node_type, outbound=False))
2021-01-21 22:20:50 +03:00
if node_type == NodeType.FULL_NODE:
return inbound_count < cast(int, self.config.get("target_peer_count", 40)) - cast(
int, self.config.get("target_outbound_peer_count", 8)
)
2021-01-21 22:20:50 +03:00
if node_type == NodeType.WALLET:
return inbound_count < cast(int, self.config.get("max_inbound_wallet", 20))
2021-01-21 22:20:50 +03:00
if node_type == NodeType.FARMER:
return inbound_count < cast(int, self.config.get("max_inbound_farmer", 10))
2021-01-21 22:20:50 +03:00
if node_type == NodeType.TIMELORD:
return inbound_count < cast(int, self.config.get("max_inbound_timelord", 5))
2021-01-21 22:20:50 +03:00
return True
def is_trusted_peer(self, peer: WSChiaConnection, trusted_peers: Dict[str, Any]) -> bool:
2023-02-20 19:23:39 +03:00
return is_trusted_peer(
host=peer.peer_info.host,
2023-02-20 19:23:39 +03:00
node_id=peer.peer_node_id,
trusted_peers=trusted_peers,
testing=self.config.get("testing", False),
)
    def set_capabilities(self, capabilities: List[Tuple[uint16, str]]) -> None:
        """Set the (capability id, value) pairs this node advertises during future handshakes."""
        self._local_capabilities_for_handshake = capabilities