libretro: make update_core.py script run concurrently

Thiago Kenji Okada 2022-12-20 23:18:59 +00:00
parent 8df7516450
commit 5dd47333af


@@ -2,12 +2,15 @@
 #!nix-shell -I nixpkgs=../../../../ -i python3 -p "python3.withPackages (ps: with ps; [ requests nix-prefetch-github ])" -p "git"
 import json
-import sys
+import os
 import subprocess
+import sys
 from pathlib import Path
+from concurrent.futures import ThreadPoolExecutor
 
 SCRIPT_PATH = Path(__file__).absolute().parent
 HASHES_PATH = SCRIPT_PATH / "hashes.json"
+GET_REPO_THREADS = int(os.environ.get("GET_REPO_THREADS", 8))
 
 CORES = {
     "atari800": {"repo": "libretro-atari800"},
     "beetle-gba": {"repo": "beetle-gba-libretro"},
@@ -27,7 +30,7 @@ CORES = {
     "bsnes": {"repo": "bsnes-libretro"},
     "bsnes-hd": {"repo": "bsnes-hd", "owner": "DerKoun"},
     "bsnes-mercury": {"repo": "bsnes-mercury"},
-    "citra": { "repo": "citra", "fetch_submodules": True },
+    "citra": {"repo": "citra", "fetch_submodules": True},
     "desmume": {"repo": "desmume"},
     "desmume2015": {"repo": "desmume2015"},
     "dolphin": {"repo": "dolphin"},
@@ -141,16 +144,23 @@ def get_repo_hash(fetcher="fetchFromGitHub", **kwargs):
         raise ValueError(f"Unsupported fetcher: {fetcher}")
 
 
-def get_repo_hashes(cores_to_update=[]):
+def get_repo_hashes(cores={}):
+    def get_repo_hash_from_core_def(core_def):
+        core, repo = core_def
+        info(f"Getting repo hash for '{core}'...")
+        result = core, get_repo_hash(**repo)
+        info(f"Got repo hash for '{core}'!")
+        return result
+
     with open(HASHES_PATH) as f:
         repo_hashes = json.loads(f.read())
 
-    for core, repo in CORES.items():
-        if core in cores_to_update:
-            info(f"Getting repo hash for '{core}'...")
-            repo_hashes[core] = get_repo_hash(**repo)
-        else:
-            info(f"Skipping '{core}'...")
+    info(f"Running with {GET_REPO_THREADS} threads!")
+    with ThreadPoolExecutor(max_workers=GET_REPO_THREADS) as executor:
+        new_repo_hashes = executor.map(get_repo_hash_from_core_def, cores.items())
+
+    for core, repo in new_repo_hashes:
+        repo_hashes[core] = repo
 
     return repo_hashes
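
This hunk is the core of the change: the sequential loop over the cores is replaced by ThreadPoolExecutor.map, which fans the slow per-repository fetches out across GET_REPO_THREADS worker threads. A self-contained sketch of the same pattern, with a placeholder worker standing in for the real get_repo_hash call:

    from concurrent.futures import ThreadPoolExecutor

    CORES = {
        "atari800": {"repo": "libretro-atari800"},
        "desmume": {"repo": "desmume"},
    }

    def fetch_one(core_def):
        # Unpack one (core, repo) pair, do the slow work, and return the
        # pair so the caller can rebuild a dict from the results.
        core, repo = core_def
        return core, f"fake-hash-for-{repo['repo']}"  # stands in for get_repo_hash(**repo)

    with ThreadPoolExecutor(max_workers=8) as executor:
        new_repo_hashes = executor.map(fetch_one, CORES.items())

    print(dict(new_repo_hashes))

executor.map submits every task up front and yields results in input order; leaving the `with` block waits for all submitted tasks to finish, so the result iterator can safely be drained afterwards, exactly as the new for-loop does.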
@@ -164,7 +174,8 @@ def main():
     else:
         cores_to_update = CORES.keys()
 
-    repo_hashes = get_repo_hashes(cores_to_update)
+    cores = {core: repo for core, repo in CORES.items() if core in cores_to_update}
+    repo_hashes = get_repo_hashes(cores)
     info(f"Generating '{HASHES_PATH}'...")
     with open(HASHES_PATH, "w") as f:
         f.write(json.dumps(dict(sorted(repo_hashes.items())), indent=4))
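
Finally, main() now pre-filters CORES down to the requested subset with a dict comprehension, replacing the old skip-inside-the-loop logic in get_repo_hashes, so the thread pool only ever receives work that actually needs doing. A small sketch of that filtering step (the two-core selection is a hypothetical example):

    cores_to_update = ["citra", "dolphin"]
    # Keep only the selected cores; every other core's existing entry in
    # hashes.json is left untouched.
    cores = {core: repo for core, repo in CORES.items() if core in cores_to_update}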