Mirror of https://github.com/openvinotoolkit/stable-diffusion-webui.git (synced 2024-12-14 22:53:25 +03:00)
cache git extension repo information
Commit 510e5fc8c6 (parent 2b1bae0d75)
modules/cache.py (new file, +96 lines)
@@ -0,0 +1,96 @@
import json
import os.path

import filelock

from modules.paths import data_path, script_path

cache_filename = os.path.join(data_path, "cache.json")
cache_data = None


def dump_cache():
    """
    Saves all cache data to a file.
    """

    with filelock.FileLock(f"{cache_filename}.lock"):
        with open(cache_filename, "w", encoding="utf8") as file:
            json.dump(cache_data, file, indent=4)


def cache(subsection):
    """
    Retrieves or initializes a cache for a specific subsection.

    Parameters:
        subsection (str): The subsection identifier for the cache.

    Returns:
        dict: The cache data for the specified subsection.
    """

    global cache_data

    if cache_data is None:
        with filelock.FileLock(f"{cache_filename}.lock"):
            if not os.path.isfile(cache_filename):
                cache_data = {}
            else:
                try:
                    with open(cache_filename, "r", encoding="utf8") as file:
                        cache_data = json.load(file)
                except Exception:
                    os.replace(cache_filename, os.path.join(script_path, "tmp", "cache.json"))
                    print('[ERROR] issue occurred while trying to read cache.json, move current cache to tmp/cache.json and create new cache')
                    cache_data = {}

    s = cache_data.get(subsection, {})
    cache_data[subsection] = s

    return s


def cached_data_for_file(subsection, title, filename, func):
    """
    Retrieves or generates data for a specific file, using a caching mechanism.

    Parameters:
        subsection (str): The subsection of the cache to use.
        title (str): The title of the data entry in the subsection of the cache.
        filename (str): The path to the file to be checked for modifications.
        func (callable): A function that generates the data if it is not available in the cache.

    Returns:
        dict or None: The cached or generated data, or None if data generation fails.

    The `cached_data_for_file` function implements a caching mechanism for data stored in files.
    It checks if the data associated with the given `title` is present in the cache and compares the
    modification time of the file with the cached modification time. If the file has been modified,
    the cache is considered invalid and the data is regenerated using the provided `func`.
    Otherwise, the cached data is returned.

    If the data generation fails, None is returned to indicate the failure. Otherwise, the generated
    or cached data is returned as a dictionary.
    """

    existing_cache = cache(subsection)
    ondisk_mtime = os.path.getmtime(filename)

    entry = existing_cache.get(title)
    if entry:
        cached_mtime = existing_cache[title].get("mtime", 0)
        if ondisk_mtime > cached_mtime:
            entry = None

    if not entry:
        entry = func()
        if entry is None:
            return None

        entry['mtime'] = ondisk_mtime
        existing_cache[title] = entry

        dump_cache()

    return entry
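For context, a minimal usage sketch of the new helper; the subsection name, title, file path, and generator function below are illustrative placeholders, not part of the commit:

from modules.cache import cached_data_for_file

def generate():
    # Called only on a cache miss, or when the watched file's mtime is newer
    # than the entry's cached "mtime"; must return a dict (None aborts caching).
    return {"note": "recomputed because example.txt changed"}

entry = cached_data_for_file("example-subsection", "example-title", "example.txt", generate)

Note that dump_cache() runs only on the miss path, so a pure cache hit never rewrites cache.json.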
modules/extensions.py
@@ -1,7 +1,7 @@
 import os
 import threading

-from modules import shared, errors
+from modules import shared, errors, cache
 from modules.gitpython_hack import Repo
 from modules.paths_internal import extensions_dir, extensions_builtin_dir, script_path  # noqa: F401

@@ -21,6 +21,7 @@ def active():
 class Extension:
     lock = threading.Lock()
+    cached_fields = ['remote', 'commit_date', 'branch', 'commit_hash', 'version']

     def __init__(self, name, path, enabled=True, is_builtin=False):
         self.name = name
@@ -36,16 +37,30 @@ class Extension:
         self.remote = None
         self.have_info_from_repo = False

+    def to_dict(self):
+        return {x: getattr(self, x) for x in self.cached_fields}
+
+    def from_dict(self, d):
+        for field in self.cached_fields:
+            setattr(self, field, d[field])
+
     def read_info_from_repo(self):
         if self.is_builtin or self.have_info_from_repo:
             return

-        with self.lock:
-            if self.have_info_from_repo:
-                return
-
-            self.do_read_info_from_repo()
+        def read_from_repo():
+            with self.lock:
+                if self.have_info_from_repo:
+                    return
+
+                self.do_read_info_from_repo()
+
+                return self.to_dict()
+
+        d = cache.cached_data_for_file('extensions-git', self.name, os.path.join(self.path, ".git"), read_from_repo)
+        self.from_dict(d)
+        self.status = 'unknown'

     def do_read_info_from_repo(self):
         repo = None
         try:
@@ -58,7 +73,6 @@ class Extension:
             self.remote = None
         else:
             try:
-                self.status = 'unknown'
                 self.remote = next(repo.remote().urls, None)
                 commit = repo.head.commit
                 self.commit_date = commit.committed_date
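The to_dict/from_dict pair is a plain selected-fields round-trip; it is what lets the repo metadata live in cache.json between runs. A self-contained sketch of the same pattern, with illustrative class and field names that are not from the commit:

class Info:
    cached_fields = ['remote', 'branch']

    def __init__(self, remote=None, branch=None):
        self.remote = remote
        self.branch = branch

    def to_dict(self):
        return {x: getattr(self, x) for x in self.cached_fields}

    def from_dict(self, d):
        for field in self.cached_fields:
            setattr(self, field, d[field])

fresh = Info("https://example.invalid/repo.git", "main")
restored = Info()
restored.from_dict(fresh.to_dict())
assert restored.branch == "main"

The watched file here is the extension's .git directory, so the cached info is regenerated whenever that directory's mtime advances (git rewrites files such as HEAD via rename, which touches the directory).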
modules/hashes.py
@@ -1,43 +1,11 @@
 import hashlib
-import json
 import os.path

-import filelock
-
 from modules import shared
-from modules.paths import data_path, script_path
+import modules.cache

-cache_filename = os.path.join(data_path, "cache.json")
-cache_data = None
+dump_cache = modules.cache.dump_cache
+cache = modules.cache.cache


-def dump_cache():
-    with filelock.FileLock(f"{cache_filename}.lock"):
-        with open(cache_filename, "w", encoding="utf8") as file:
-            json.dump(cache_data, file, indent=4)
-
-
-def cache(subsection):
-    global cache_data
-
-    if cache_data is None:
-        with filelock.FileLock(f"{cache_filename}.lock"):
-            if not os.path.isfile(cache_filename):
-                cache_data = {}
-            else:
-                try:
-                    with open(cache_filename, "r", encoding="utf8") as file:
-                        cache_data = json.load(file)
-                except Exception:
-                    os.replace(cache_filename, os.path.join(script_path, "tmp", "cache.json"))
-                    print('[ERROR] issue occurred while trying to read cache.json, move current cache to tmp/cache.json and create new cache')
-                    cache_data = {}
-
-    s = cache_data.get(subsection, {})
-    cache_data[subsection] = s
-
-    return s
-
-
 def calculate_sha256(filename):
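Because dump_cache and cache are rebound as module-level names, existing call sites that go through hashes keep working unchanged. A hypothetical check, assuming the rest of hashes.py still calls cache(...) as before:

from modules import hashes
import modules.cache

# hashes.cache is now the same function object as modules.cache.cache,
# so both names hand back the same mutable subsection dict.
assert hashes.cache is modules.cache.cache
subsection = hashes.cache("example-subsection")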
modules/ui_extensions.py
@@ -513,14 +513,8 @@ def refresh_available_extensions_from_data(hide_tags, sort_column, filter_text="


 def preload_extensions_git_metadata():
-    t0 = time.time()
     for extension in extensions.extensions:
         extension.read_info_from_repo()
-    print(
-        f"preload_extensions_git_metadata for "
-        f"{len(extensions.extensions)} extensions took "
-        f"{time.time() - t0:.2f}s"
-    )


 def create_ui():