Automatically rebuild the cache on exception, fixes #5213 (#8257)

Try to rebuild the cache if an exception is raised while opening it, fixes #5213

For now, we catch FileNotFoundError and FileIntegrityError.

Write the cache config without the manifest to prevent overriding manifest_id.
This keeps manifest_id empty, and the empty id triggers re-syncing of the chunks
cache by calling sync() inside LocalCache.__init__().
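For illustration only, a condensed sketch of that mechanism (the helper name below is hypothetical and this is not the actual borg code; only open(), wipe_cache(), sync() and cache_config.manifest_id come from the change described above):

    # Hypothetical, condensed sketch of the flow inside LocalCache.__init__() --
    # not the actual borg code. After wipe_cache(), the stored manifest_id is ""
    # and can never match the repository's current manifest id, so sync() runs
    # and rebuilds the chunks cache.
    from borg.crypto.file_integrity import FileIntegrityError

    def _open_or_rebuild(self, manifest):  # hypothetical helper name
        try:
            self.open()
        except (FileNotFoundError, FileIntegrityError):
            self.wipe_cache()  # leaves cache_config.manifest_id == ""
            self.open()
        if self.cache_config.manifest_id != manifest.id:
            self.sync()  # re-read chunk references from the repository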

Adapt and extend test_cache_chunks to the new behaviour:

- a cache wipe is now expected.
- borg detects the corrupted cache and wipes/rebuilds it.
- check that both the in-memory and the on-disk cache are as expected (a rebuilt chunks cache).
William Bonnaventure 2024-07-06 18:05:01 +02:00 committed by GitHub
parent aada9859ff
commit c3fb27f463
2 changed files with 46 additions and 11 deletions


@@ -531,7 +531,12 @@ def __init__(
         self.security_manager.assert_access_unknown(warn_if_unencrypted, manifest, self.key)
         self.create()
-        self.open()
+        try:
+            self.open()
+        except (FileNotFoundError, FileIntegrityError):
+            self.wipe_cache()
+            self.open()
         try:
             self.security_manager.assert_secure(manifest, self.key, cache_config=self.cache_config)
@@ -924,19 +929,31 @@ def check_cache_compatibility(self):
         return True

     def wipe_cache(self):
-        logger.warning("Discarding incompatible cache and forcing a cache rebuild")
+        logger.warning("Discarding incompatible or corrupted cache and forcing a cache rebuild")
         archive_path = os.path.join(self.path, "chunks.archive.d")
         if os.path.isdir(archive_path):
             shutil.rmtree(os.path.join(self.path, "chunks.archive.d"))
             os.makedirs(os.path.join(self.path, "chunks.archive.d"))
         self.chunks = ChunkIndex()
-        with SaveFile(os.path.join(self.path, files_cache_name()), binary=True):
+        with IntegrityCheckedFile(path=os.path.join(self.path, "chunks"), write=True) as fd:
+            self.chunks.write(fd)
+        self.cache_config.integrity["chunks"] = fd.integrity_data
+        with IntegrityCheckedFile(path=os.path.join(self.path, files_cache_name()), write=True) as fd:
             pass  # empty file
+        self.cache_config.integrity[files_cache_name()] = fd.integrity_data
         self.cache_config.manifest_id = ""
         self.cache_config._config.set("cache", "manifest", "")
+        if not self.cache_config._config.has_section("integrity"):
+            self.cache_config._config.add_section("integrity")
+        for file, integrity_data in self.cache_config.integrity.items():
+            self.cache_config._config.set("integrity", file, integrity_data)
+        # This is needed to pass the integrity check later on inside CacheConfig.load()
+        self.cache_config._config.set("integrity", "manifest", "")

         self.cache_config.ignored_features = set()
         self.cache_config.mandatory_features = set()
+        with SaveFile(self.cache_config.config_path) as fd:
+            self.cache_config._config.write(fd)

     def update_compatibility(self):
         operation_to_features_map = self.manifest.get_all_mandatory_features()
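Side note on the pattern in the hunk above: IntegrityCheckedFile collects a hash while the file is written and exposes it afterwards as fd.integrity_data, which is why that data has to be persisted into the cache config so a later read can be verified. A rough usage sketch, under the assumption that the read side mirrors the write side shown in the diff (treat the exact read-side keyword arguments as assumptions):

    from borg.crypto.file_integrity import IntegrityCheckedFile, FileIntegrityError

    # write side: integrity data becomes available after the file is closed
    with IntegrityCheckedFile(path="/tmp/cache/chunks", write=True) as fd:
        fd.write(b"...serialized chunk index...")
    stored = fd.integrity_data  # persisted here in the [integrity] section of the cache config

    # read side (assumed call): a mismatch raises FileIntegrityError,
    # which LocalCache.__init__() now catches and answers with wipe_cache()
    try:
        with IntegrityCheckedFile(path="/tmp/cache/chunks", write=False, integrity_data=stored) as fd:
            data = fd.read()
    except FileIntegrityError:
        pass  # corrupted cache file detected, trigger a rebuild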


@@ -2,13 +2,15 @@
 import json
 import os
 from configparser import ConfigParser
+from unittest.mock import patch

 import pytest

 from ...constants import *  # NOQA
-from ...crypto.file_integrity import FileIntegrityError
 from ...helpers import bin_to_hex, Error
 from . import cmd, create_src_archive, create_test_files, RK_ENCRYPTION
+from ...hashindex import ChunkIndex
+from ...cache import LocalCache


 def test_check_corrupted_repository(archiver):
@@ -43,15 +45,31 @@ def corrupt(file, amount=1):
         fd.write(corrupted)


+@pytest.mark.allow_cache_wipe
 def test_cache_chunks(archiver):
     corrupt_archiver(archiver)
-    corrupt(os.path.join(archiver.cache_path, "chunks"))
-    if archiver.FORK_DEFAULT:
-        out = cmd(archiver, "rinfo", exit_code=2)
-        assert "failed integrity check" in out
-    else:
-        with pytest.raises(FileIntegrityError):
-            cmd(archiver, "rinfo")
+    create_src_archive(archiver, "test")
+    chunks_path = os.path.join(archiver.cache_path, "chunks")
+    chunks_before_corruption = set(ChunkIndex(path=chunks_path).iteritems())
+    corrupt(chunks_path)
+
+    assert not archiver.FORK_DEFAULT  # test does not support forking
+
+    chunks_in_memory = None
+    sync_chunks = LocalCache.sync
+
+    def sync_wrapper(cache):
+        nonlocal chunks_in_memory
+        sync_chunks(cache)
+        chunks_in_memory = set(cache.chunks.iteritems())
+
+    with patch.object(LocalCache, "sync", sync_wrapper):
+        out = cmd(archiver, "rinfo")
+    assert chunks_in_memory == chunks_before_corruption
+    assert "forcing a cache rebuild" in out
+
+    chunks_after_repair = set(ChunkIndex(path=chunks_path).iteritems())
+    assert chunks_after_repair == chunks_before_corruption


 def test_cache_files(archiver):
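The allow_cache_wipe marker used above is defined and enforced elsewhere in the test suite, outside this diff. Purely as a hedged illustration (names and wiring are assumptions, not taken from borg), such a guard could be set up in a conftest.py roughly like this:

    # conftest.py sketch (assumption): fail any test that wipes the cache
    # unless it explicitly opted in via @pytest.mark.allow_cache_wipe.
    import pytest
    from borg.cache import LocalCache

    @pytest.fixture(autouse=True)
    def guard_cache_wipe(request, monkeypatch):
        if request.node.get_closest_marker("allow_cache_wipe"):
            yield  # this test expects a wipe, leave wipe_cache() untouched
            return

        def unexpected_wipe(self):
            pytest.fail("unexpected cache wipe, use @pytest.mark.allow_cache_wipe to allow it")

        monkeypatch.setattr(LocalCache, "wipe_cache", unexpected_wipe)
        yield

With a setup along these lines the marker would also need to be registered (for example via a "markers" entry in the pytest configuration) to avoid unknown-marker warnings.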