harvester: Some log updates (#13508)

* Log the number of cache entries and the time it took to parse them

* Log `started` and `done` as `INFO` and `batch_processed` as `DEBUG`

* Log the plot found message with `DEBUG` level

* Some additional debug logs in `new_signage_point_harvester`
This commit is contained in:
dustinface 2022-09-29 21:53:06 +02:00 committed by GitHub
parent d06b6b0a57
commit ec059e9a4f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 13 additions and 2 deletions

View File

@@ -106,7 +106,7 @@ class Harvester:
self.state_changed_callback(change, change_data)
def _plot_refresh_callback(self, event: PlotRefreshEvents, update_result: PlotRefreshResult):
log_function = self.log.debug if event != PlotRefreshEvents.done else self.log.info
log_function = self.log.debug if event == PlotRefreshEvents.batch_processed else self.log.info
log_function(
f"_plot_refresh_callback: event {event.name}, loaded {len(update_result.loaded)}, "
f"removed {len(update_result.removed)}, processed {update_result.processed}, "

View File

@@ -63,8 +63,14 @@ class HarvesterAPI:
"""
if not self.harvester.plot_manager.public_keys_available():
# This means that we have not received the handshake yet
self.harvester.log.debug("new_signage_point_harvester received with no keys available")
return None
self.harvester.log.debug(
f"new_signage_point_harvester lookup: challenge_hash: {new_challenge.challenge_hash}, "
f"sp_hash: {new_challenge.sp_hash}, signage_point_index: {new_challenge.signage_point_index}"
)
start = time.time()
assert len(new_challenge.challenge_hash) == 32
@@ -172,6 +178,7 @@ class HarvesterAPI:
passed = 0
total = 0
with self.harvester.plot_manager:
self.harvester.log.debug("new_signage_point_harvester lock acquired")
for try_plot_filename, try_plot_info in self.harvester.plot_manager.plots.items():
# Passes the plot filter (does not check sp filter yet though, since we have not reached sp)
# This is being executed at the beginning of the slot
@@ -184,6 +191,7 @@ class HarvesterAPI:
):
passed += 1
awaitables.append(lookup_challenge(try_plot_filename, try_plot_info))
self.harvester.log.debug(f"new_signage_point_harvester {passed} plots passed the plot filter")
# Concurrently executes all lookups on disk, to take advantage of multiple disk parallelism
total_proofs_found = 0

View File

@@ -132,6 +132,7 @@ class Cache:
log.info(f"Loaded {len(serialized)} bytes of cached data")
stored_cache: VersionedBlob = VersionedBlob.from_bytes(serialized)
if stored_cache.version == CURRENT_VERSION:
start = time.time()
cache_data: CacheDataV1 = CacheDataV1.from_bytes(stored_cache.blob)
self._data = {}
estimated_c2_sizes: Dict[int, int] = {}
@@ -165,6 +166,8 @@ class Cache:
else:
self._data[Path(path)] = new_entry
log.info(f"Parsed {len(self._data)} cache entries in {time.time() - start:.2f}s")
else:
raise ValueError(f"Invalid cache version {stored_cache.version}. Expected version {CURRENT_VERSION}.")
except FileNotFoundError:

View File

@@ -339,7 +339,7 @@ class PlotManager:
log.error(f"Failed to open file {file_path}. {e} {tb}")
self.failed_to_open_filenames[file_path] = int(time.time())
return None
log.info(f"Found plot {file_path} of size {new_plot_info.prover.get_size()}, cache_hit: {cache_hit}")
log.debug(f"Found plot {file_path} of size {new_plot_info.prover.get_size()}, cache_hit: {cache_hit}")
return new_plot_info