2017-02-02 13:49:05 +03:00
|
|
|
#!/usr/bin/env python3
|
|
|
|
# License: GPL v3 Copyright: 2017, Kovid Goyal <kovid at kovidgoyal.net>
|
|
|
|
|
|
|
|
import argparse
|
2023-01-21 10:26:00 +03:00
|
|
|
import base64
|
|
|
|
import contextlib
|
2021-09-16 20:14:18 +03:00
|
|
|
import datetime
|
2022-11-14 14:37:32 +03:00
|
|
|
import glob
|
2017-02-02 13:49:05 +03:00
|
|
|
import io
|
|
|
|
import json
|
|
|
|
import mimetypes
|
|
|
|
import os
|
|
|
|
import pprint
|
|
|
|
import re
|
|
|
|
import shlex
|
2018-06-08 07:43:15 +03:00
|
|
|
import shutil
|
2017-02-02 13:49:05 +03:00
|
|
|
import subprocess
|
|
|
|
import sys
|
2018-06-08 08:06:33 +03:00
|
|
|
import tempfile
|
2017-02-02 13:49:05 +03:00
|
|
|
import time
|
2021-09-16 19:43:01 +03:00
|
|
|
from contextlib import contextmanager, suppress
|
2023-01-21 10:26:00 +03:00
|
|
|
from http.client import HTTPResponse, HTTPSConnection
|
2023-01-21 10:36:16 +03:00
|
|
|
from typing import Any, Callable, Dict, Generator, Iterable, Optional, Tuple, Union
|
2023-01-21 10:26:00 +03:00
|
|
|
from urllib.parse import urlencode, urlparse
|
2017-02-02 13:49:05 +03:00
|
|
|
|
|
|
|
# All paths below are relative to the repository root.
os.chdir(os.path.dirname(os.path.abspath(__file__)))
docs_dir = os.path.abspath('docs')
# Checkout of the GitHub pages repository the website is published into.
publish_dir = os.path.abspath(os.path.join('..', 'kovidgoyal.github.io', 'kitty'))
# Set to True by main() when --nightly is used; run_build() retries failed
# builds more aggressively in that case.
building_nightly = False
# Extract version and appname by scanning kitty/constants.py as text rather
# than importing it (importing would require a built extension).
with open('kitty/constants.py') as f:
    raw = f.read()
nv = re.search(r'^version: Version\s+=\s+Version\((\d+), (\d+), (\d+)\)', raw, flags=re.MULTILINE)
if nv is not None:
    version = f'{nv.group(1)}.{nv.group(2)}.{nv.group(3)}'
ap = re.search(r"^appname: str\s+=\s+'([^']+)'", raw, flags=re.MULTILINE)
if ap is not None:
    appname = ap.group(1)

# Ordered action lists; each entry must have a matching run_<action>() function.
ALL_ACTIONS = 'man html build tag sdist upload website'.split()
NIGHTLY_ACTIONS = 'man html build sdist upload_nightly'.split()
|
2017-02-02 13:49:05 +03:00
|
|
|
|
|
|
|
|
2021-09-21 05:47:53 +03:00
|
|
|
def echo_cmd(cmd: Iterable[str]) -> None:
    """Print *cmd* shell-quoted, in bright green when stdout is a terminal."""
    terminator = '\n'
    if sys.stdout.isatty():
        # Open with bright green now, reset attributes just before the newline.
        print('\x1b[92m', end='')
        terminator = f'\x1b[m{terminator}'
    print(shlex.join(cmd), end=terminator, flush=True)
|
|
|
|
|
|
|
|
|
|
|
|
def call(*cmd: str, cwd: Optional[str] = None, echo: bool = False) -> None:
    """Run a subprocess and raise SystemExit with its code on failure.

    A single positional argument is treated as a shell-style command string
    and split with shlex; multiple arguments are used as the argv directly.
    """
    argv = shlex.split(cmd[0]) if len(cmd) == 1 else list(cmd)
    if echo:
        echo_cmd(cmd)
    returncode = subprocess.Popen(argv, cwd=cwd).wait()
    if returncode:
        raise SystemExit(returncode)
|
|
|
|
|
|
|
|
|
2020-03-14 12:07:11 +03:00
|
|
|
def run_build(args: Any) -> None:
    """Build the Linux/macOS binary bundles and the static kitten binaries."""

    def attempt(cmd: str) -> None:
        # One retry is allowed, but only for the flaky arm64 VM or nightly runs.
        try:
            call(cmd, echo=True)
        except (SystemExit, Exception):
            if 'arm64' not in cmd and not building_nightly:
                raise
            print('Build failed, retrying in a few seconds...', file=sys.stderr)
            if 'macos' in cmd:
                call('python ../bypy macos shutdown')
            time.sleep(25)
            call(cmd, echo=True)

    for arch in ('64', '32', 'arm64'):
        bypy_prefix = f'python ../bypy linux --arch {arch} '
        attempt(bypy_prefix + 'program --non-interactive')
        call(bypy_prefix + 'shutdown', echo=True)
    attempt('python ../bypy macos program --sign-installers --notarize --non-interactive')
    call('python ../bypy macos shutdown', echo=True)
    call('./setup.py build-static-binaries')
|
2017-02-02 13:49:05 +03:00
|
|
|
|
|
|
|
|
2020-03-14 12:07:11 +03:00
|
|
|
def run_tag(args: Any) -> None:
    """Push pending commits, then create and push a signed release tag."""
    for git_cmd in (
        'git push',
        f'git tag -s v{version} -m version-{version}',
        f'git push origin v{version}',
    ):
        call(git_cmd)
|
2017-02-02 13:49:05 +03:00
|
|
|
|
|
|
|
|
2020-03-14 12:07:11 +03:00
|
|
|
def run_man(args: Any) -> None:
    # Build the man pages; FAIL_WARN=1 makes documentation warnings fatal.
    call('make FAIL_WARN=1 man', cwd=docs_dir)
|
2018-06-08 07:43:15 +03:00
|
|
|
|
|
|
|
|
2020-03-14 12:07:11 +03:00
|
|
|
def run_html(args: Any) -> None:
    # Build the dirhtml docs (warnings fatal) with the analytics id injected,
    # then create redirect stubs so pre-dirhtml page URLs keep working.
    call('make FAIL_WARN=1 "OPTS=-D analytics_id=G-XTJK3R7GF2" dirhtml', cwd=docs_dir)
    add_old_redirects('docs/_build/dirhtml')
|
|
|
|
|
|
|
|
|
2021-12-10 12:34:39 +03:00
|
|
|
def generate_redirect_html(link_name: str, bname: str) -> None:
    """Write a small HTML page at *link_name* that redirects to ``{bname}/``.

    Uses JavaScript (preserving the URL fragment) with a meta-refresh
    fallback for browsers with scripting disabled.
    """
    markup = f'''
<html>
<head>
<title>Redirecting...</title>
<link rel="canonical" href="{bname}/" />
<noscript>
<meta http-equiv="refresh" content="0;url={bname}/" />
</noscript>
<script type="text/javascript">
window.location.replace('./{bname}/' + window.location.hash);
</script>
</head>
<body>
<p>Redirecting, please wait...</p>
</body>
</html>
'''
    with open(link_name, 'w') as out:
        out.write(markup)
|
2019-03-07 09:37:49 +03:00
|
|
|
|
|
|
|
|
2021-12-10 12:34:39 +03:00
|
|
|
def add_old_redirects(loc: str) -> None:
    """Create legacy ``<name>.html`` redirect stubs for each dirhtml page."""
    for dirpath, dirnames, filenames in os.walk(loc):
        # Skip the root itself and directories without an index page.
        if dirpath == loc or 'index.html' not in filenames:
            continue
        bname = os.path.basename(dirpath)
        parent = os.path.dirname(dirpath)
        stub = os.path.join(parent, f'{bname}.html') if parent else f'{bname}.html'
        generate_redirect_html(stub, bname)

    # The unicode input kitten URL changed from unicode-input to
    # unicode_input; keep both old forms of the URL alive.
    legacy = os.path.join(loc, 'kittens', 'unicode-input')
    os.makedirs(legacy, exist_ok=True)
    generate_redirect_html(os.path.join(legacy, 'index.html'), '../unicode_input')
    generate_redirect_html(f'{legacy}.html', 'unicode_input')
|
2021-12-10 12:34:39 +03:00
|
|
|
|
|
|
|
|
2020-10-05 17:34:32 +03:00
|
|
|
def run_docs(args: Any) -> None:
    # Delegate to the Makefile target that builds all the documentation.
    subprocess.check_call(['make', 'docs'])
|
|
|
|
|
|
|
|
|
2020-03-14 12:07:11 +03:00
|
|
|
def run_website(args: Any) -> None:
    """Copy the built dirhtml docs into the website checkout and push them."""
    if os.path.exists(publish_dir):
        shutil.rmtree(publish_dir)
    shutil.copytree(os.path.join(docs_dir, '_build', 'dirhtml'), publish_dir, symlinks=True)
    with open(os.path.join(publish_dir, 'current-version.txt'), 'w') as vf:
        vf.write(version)
    shutil.copy2(os.path.join(docs_dir, 'installer.sh'), publish_dir)
    # Commit and push from the website repository root.
    os.chdir(os.path.dirname(publish_dir))
    for git_cmd in (
        ['git', 'add', 'kitty'],
        ['git', 'commit', '-m', 'kitty website updates'],
        ['git', 'push'],
    ):
        subprocess.check_call(git_cmd)
|
2018-05-30 08:11:37 +03:00
|
|
|
|
|
|
|
|
2021-04-29 05:38:35 +03:00
|
|
|
def sign_file(path: str) -> None:
    """Create a detached GPG signature at ``<path>.sig``, replacing any old one."""
    sig_path = f'{path}.sig'
    with suppress(FileNotFoundError):
        os.remove(sig_path)
    subprocess.check_call([
        os.environ['PENV'] + '/gpg-as-kovid',
        '--output', sig_path,
        '--detach-sig', path,
    ])
|
|
|
|
|
|
|
|
|
2020-03-14 12:07:11 +03:00
|
|
|
def run_sdist(args: Any) -> None:
    """Create and sign the source tarball build/kitty-<version>.tar.xz.

    The tarball is assembled from a clean ``git archive`` export with the
    pre-built html and man documentation included.
    """
    with tempfile.TemporaryDirectory() as staging:
        src = os.path.join(staging, f'kitty-{version}')
        os.mkdir(src)
        subprocess.check_call(f'git archive HEAD | tar -x -C {src}', shell=True)
        built_docs = os.path.join(src, 'docs', '_build')
        os.mkdir(built_docs)
        for doc_kind in ('html', 'man'):
            shutil.copytree(os.path.join(docs_dir, '_build', doc_kind), os.path.join(built_docs, doc_kind))
        tarball = os.path.abspath(os.path.join('build', f'kitty-{version}.tar'))
        subprocess.check_call(['tar', '-cf', tarball, os.path.basename(src)], cwd=staging)
        with suppress(FileNotFoundError):
            os.remove(f'{tarball}.xz')
        subprocess.check_call(['xz', '-9', tarball])
        sign_file(f'{tarball}.xz')
|
2018-06-08 08:06:33 +03:00
|
|
|
|
|
|
|
|
2020-03-08 19:38:18 +03:00
|
|
|
class ReadFileWithProgressReporting(io.FileIO):  # {{{
    """A binary file reader that prints upload progress as it is consumed.

    GitHub.make_request streams an instance of this as the request body and
    reads the public ``_total`` attribute for the Content-Length header.
    """

    def __init__(self, path: str):
        super().__init__(path, 'rb')
        # Determine the total size by seeking to the end once, then rewind.
        self.seek(0, os.SEEK_END)
        self._total = self.tell()
        self.seek(0)
        self.start_time = time.monotonic()

    def __len__(self) -> int:
        return self._total

    def read(self, size: int = -1) -> bytes:
        chunk = super().read(size)
        if chunk:
            self.report_progress(len(chunk))
        return chunk

    def report_progress(self, size: int) -> None:
        """Print a single-line progress/ETA report; a summary once fully read."""
        pos = self.tell()
        frac = int(pos * 100 / self._total)
        mb_pos = pos / float(1024**2)
        mb_tot = self._total / float(1024**2)
        kb_rate = (pos / 1024.0) / (time.monotonic() - self.start_time)
        byte_rate = kb_rate * 1024
        eta_m, eta_s = divmod(int((self._total - pos) / byte_rate) + 1, 60)
        if sys.stdout.isatty():
            # \r + erase-line redraws in place; ?7l disables line wrap.
            print(
                f'\r\033[K\033[?7h {frac}% {mb_pos:.1f}/{mb_tot:.1f}MB {kb_rate:.1f} KB/sec {eta_m} minutes, {eta_s} seconds left\033[?7l',
                end='')
        if pos >= self._total:
            elapsed = int(time.monotonic() - self.start_time) + 1
            print(f'\nUpload took {elapsed//60} minutes and {elapsed%60} seconds at {kb_rate:.1f} KB/sec')
        sys.stdout.flush()
# }}}
|
|
|
|
|
|
|
|
|
2023-01-21 10:36:16 +03:00
|
|
|
class GitHub:  # {{{
    """Create/find a GitHub release and upload the given artifacts to it.

    ``files`` maps a local artifact path to its human readable description
    (used as the asset label). ``version`` may be the literal string
    'nightly', which switches to nightly-release behavior throughout.
    """

    API = 'https://api.github.com'

    def __init__(
        self,
        files: Dict[str, str],
        reponame: str,
        version: str,
        username: str,
        password: str,
        replace: bool = False
    ):
        self.files, self.reponame, self.version, self.username, self.password, self.replace = (
            files, reponame, version, username, password, replace)
        # Releases are tagged v<version>, except the rolling 'nightly' tag.
        self.current_tag_name = self.version if self.version == 'nightly' else f'v{self.version}'
        self.is_nightly = self.current_tag_name == 'nightly'
        # HTTP Basic auth header value; password is presumably a personal
        # access token -- TODO confirm against the caller.
        self.auth = 'Basic ' + base64.standard_b64encode(f'{self.username}:{self.password}'.encode()).decode()
        self.url_base = f'{self.API}/repos/{self.username}/{self.reponame}/releases'

    def info(self, *args: Any) -> None:
        # Progress messages go to stdout, flushed immediately.
        print(*args, flush=True)

    def error(self, *args: Any) -> None:
        # Error messages go to stderr, flushed immediately.
        print(*args, flush=True, file=sys.stderr)

    def make_request(
        self, url: str, data: Optional[Dict[str, Any]] = None, method:str = 'GET',
        upload_data: Optional[ReadFileWithProgressReporting] = None,
        params: Optional[Dict[str, str]] = None,
    ) -> HTTPSConnection:
        """Issue one API request; returns the connection with the request
        sent but the response not yet read (caller must close it)."""
        headers={
            'Authorization': self.auth,
            'Accept': 'application/vnd.github+json',
            'User-Agent': 'kitty',
        }
        if params:
            url += '?' + urlencode(params)
        rdata: Optional[Union[bytes, io.FileIO]] = None
        if data is not None:
            # JSON body for normal API calls.
            rdata = json.dumps(data).encode('utf-8')
            headers['Content-Type'] = 'application/json'
            headers['Content-Length'] = str(len(rdata))
        elif upload_data is not None:
            # Streamed file body for asset uploads; _total was computed by
            # ReadFileWithProgressReporting at open time.
            rdata = upload_data
            mime_type = mimetypes.guess_type(os.path.basename(str(upload_data.name)))[0] or 'application/octet-stream'
            headers['Content-Type'] = mime_type
            headers['Content-Length'] = str(upload_data._total)
        purl = urlparse(url)
        conn = HTTPSConnection(purl.netloc, timeout=60)
        conn.request(method, url, body=rdata, headers=headers)
        return conn

    def make_request_with_retries(
        self, url: str, data: Optional[Dict[str, str]] = None, method:str = 'GET',
        num_tries: int = 2, sleep_between_tries: float = 15,
        success_codes: Tuple[int, ...] = (200,),
        failure_msg: str = 'Request failed',
        return_data: bool = False,
        upload_path: str = '',
        params: Optional[Dict[str, str]] = None,
        failure_callback: Callable[[HTTPResponse], None] = lambda r: None,
    ) -> Any:
        """Retry an API request up to *num_tries* times.

        On the final failed try fail() is called, which raises SystemExit.
        If *upload_path* is given the request becomes a POST streaming that
        file. Returns the parsed JSON body when *return_data* is True.
        """
        for i in range(num_tries):
            try:
                if upload_path:
                    conn = self.make_request(url, method='POST', upload_data=ReadFileWithProgressReporting(upload_path), params=params)
                else:
                    conn = self.make_request(url, data, method, params=params)
                with contextlib.closing(conn):
                    r = conn.getresponse()
                    if r.status in success_codes:
                        return json.loads(r.read()) if return_data else None
                    if i == num_tries -1 :
                        self.fail(r, failure_msg)
                    else:
                        # Report, give the callback a chance to clean up
                        # (e.g. delete a partially uploaded asset), then retry.
                        self.print_failed_response_details(r, failure_msg)
                        failure_callback(r)
            except Exception as e:
                self.error(failure_msg, 'with error:', e)
            self.error(f'Retrying after {sleep_between_tries} seconds')
            time.sleep(sleep_between_tries)
        return None

    def patch(self, url: str, fail_msg: str, **data: str) -> None:
        """Convenience wrapper for a PATCH request with retries."""
        self.make_request_with_retries(url, data, method='PATCH', failure_msg=fail_msg)

    def update_nightly_description(self, release_id: int) -> None:
        """Rewrite the nightly release body with the build time and commit."""
        url = f'{self.url_base}/{release_id}'
        # NOTE(review): datetime.utcnow() is deprecated in newer Pythons;
        # datetime.now(timezone.utc) is the modern equivalent.
        now = str(datetime.datetime.utcnow()).split('.')[0] + ' UTC'
        commit = subprocess.check_output(['git', 'rev-parse', '--verify', '--end-of-options', 'master^{commit}']).decode('utf-8').strip()
        self.patch(
            url, 'Failed to update nightly release description',
            body=f'Nightly release, generated on: {now} from commit: {commit}.'
            ' For how to install nightly builds, see: https://sw.kovidgoyal.net/kitty/binary/#customizing-the-installation'
        )

    def delete_asset(self, url: str, fname: str) -> None:
        """Delete a release asset; 404 is accepted as already-deleted."""
        self.make_request_with_retries(
            url, method='DELETE', num_tries=5, sleep_between_tries=2,
            success_codes=(204, 404),
            failure_msg=f'Failed to delete {fname} from GitHub')

    def __call__(self) -> None:
        """Upload all self.files to the release, replacing existing assets."""
        # See https://docs.github.com/en/rest/releases/assets#upload-a-release-asset
        # self.clean_older_releases(releases)
        release = self.create_release()
        upload_url = release['upload_url'].partition('{')[0]
        asset_url = f'{self.url_base}/assets/{{}}'
        existing_assets = self.existing_assets(release['id'])

        # NOTE(review): this closure reads `fname` from __call__'s scope,
        # which is only bound by the nightly loop below; when called from
        # upload_with_retries in a non-nightly run, `fname` may be unbound
        # (NameError) or stale -- verify intended behavior.
        def delete_asset(asset_id: str) -> None:
            self.delete_asset(asset_url.format(asset_id), fname)

        def upload_with_retries(path: str, desc: str, num_tries: int = 8, sleep_time: float = 60.0) -> None:
            fname = os.path.basename(path)
            if self.is_nightly:
                # Nightly assets are named with 'nightly' instead of a version.
                fname = fname.replace(version, 'nightly')
            if fname in existing_assets:
                self.info(f'Deleting {fname} from GitHub with id: {existing_assets[fname]}')
                delete_asset(existing_assets.pop(fname))
            params = {'name': fname, 'label': desc}

            def handle_failure(r: HTTPResponse) -> None:
                # A failed upload can leave a partial asset behind; find its
                # id (from the error response or by re-listing) and delete it
                # so the retry does not conflict.
                try:
                    asset_id = json.loads(r.read())['id']
                except Exception:
                    try:
                        asset_id = self.existing_assets(release['id'])[fname]
                    except KeyError:
                        asset_id = 0
                if asset_id:
                    self.info(f'Deleting {fname} from GitHub with id: {asset_id}')
                    delete_asset(asset_id)

            self.make_request_with_retries(
                upload_url, upload_path=path, params=params, num_tries=num_tries, sleep_between_tries=sleep_time,
                failure_msg=f'Failed to upload file: {fname}', success_codes=(201,), failure_callback=handle_failure
            )

        if self.is_nightly:
            # Nightly releases are wiped clean before re-uploading.
            for fname in tuple(existing_assets):
                self.info(f'Deleting {fname} from GitHub with id: {existing_assets[fname]}')
                delete_asset(existing_assets.pop(fname))
            self.update_nightly_description(release['id'])
        for path, desc in self.files.items():
            self.info('')
            upload_with_retries(path, desc)

    def clean_older_releases(self, releases: Iterable[Dict[str, Any]]) -> None:
        """Delete the assets of every release other than the current one."""
        for release in releases:
            if release.get('assets') and release['tag_name'] != self.current_tag_name:
                self.info(f'\nDeleting old released installers from: {release["tag_name"]}')
                for asset in release['assets']:
                    self.delete_asset(
                        f'{self.url_base}/assets/{asset["id"]}', asset['name'])

    def print_failed_response_details(self, r: HTTPResponse, msg: str) -> None:
        """Log a failed response's status and (if parseable) its JSON body."""
        self.error(msg, f'\nStatus Code: {r.status} {r.reason}')
        try:
            jr = json.loads(r.read())
        except Exception:
            pass
        else:
            self.error('JSON from response:')
            pprint.pprint(jr, stream=sys.stderr)

    def fail(self, r: HTTPResponse, msg: str) -> None:
        """Log the failed response and abort the program."""
        self.print_failed_response_details(r, msg)
        raise SystemExit(1)

    def existing_assets(self, release_id: str) -> Dict[str, str]:
        """Return a map of asset name -> asset id for the given release."""
        url = f'{self.url_base}/{release_id}/assets'
        d = self.make_request_with_retries(url, failure_msg='Failed to get assets for release', return_data=True)
        return {asset['name']: asset['id'] for asset in d}

    def create_release(self) -> Dict[str, Any]:
        ' Create a release on GitHub or if it already exists, return the existing release '
        # Check for existing release
        url = f'{self.url_base}/tags/{self.current_tag_name}'
        with contextlib.closing(self.make_request(url)) as conn:
            r = conn.getresponse()
            if r.status == 200:
                return {str(k): v for k, v in json.loads(r.read()).items()}
        # The nightly release is created once by hand; never recreate it here.
        if self.is_nightly:
            self.fail(r, 'No existing nightly release found on GitHub')
        data = {
            'tag_name': self.current_tag_name,
            'target_commitish': 'master',
            'name': f'version {self.version}',
            'body': f'Release version {self.version}.'
            ' For changelog, see https://sw.kovidgoyal.net/kitty/changelog/#detailed-list-of-changes'
            ' GPG key used for signing tarballs is: https://calibre-ebook.com/signatures/kovid.gpg',
            'draft': False,
            'prerelease': False
        }
        with contextlib.closing(self.make_request(self.url_base, method='POST', data=data)) as conn:
            r = conn.getresponse()
            if r.status != 201:
                self.fail(r, f'Failed to create release for version: {self.version}')
            return {str(k): v for k, v in json.loads(r.read()).items()}
# }}}
|
|
|
|
|
|
|
|
|
2020-03-14 12:07:11 +03:00
|
|
|
def get_github_data() -> Dict[str, str]:
    """Read "username:token" from $PENV/github-token and return it as a dict."""
    with open(os.environ['PENV'] + '/github-token') as src:
        username, password = src.read().strip().split(':')
    return {'username': username, 'password': password}
|
|
|
|
|
|
|
|
|
2021-09-16 19:43:01 +03:00
|
|
|
def files_for_upload() -> Dict[str, str]:
    """Collect, and GPG-sign, all release artifacts.

    Returns a map of artifact path -> human readable description. Aborts
    via SystemExit if any expected artifact is missing.
    """
    files: Dict[str, str] = {}
    signatures: Dict[str, str] = {}
    installers = {
        'macos/dist/kitty-{}.dmg': 'macOS dmg',
        'linux/64/dist/kitty-{}-x86_64.txz': 'Linux amd64 binary bundle',
        'linux/32/dist/kitty-{}-i686.txz': 'Linux x86 binary bundle',
        'linux/arm64/dist/kitty-{}-arm64.txz': 'Linux arm64 binary bundle',
    }
    for pattern, desc in installers.items():
        installer = os.path.join('bypy', 'b', pattern.format(version))
        if not os.path.exists(installer):
            raise SystemExit(f'The installer {installer} does not exist')
        files[installer] = desc
        signatures[installer] = f'GPG signature for {desc}'
    num_installers = len(files)
    # The static kitten binaries built by setup.py build-static-binaries.
    for static in glob.glob('build/static/kitten-*'):
        if static.endswith('.sig'):
            continue
        static = os.path.abspath(static)
        exe_name = os.path.basename(static)
        files[static] = f'Static {exe_name} executable'
        signatures[static] = f'GPG signature for static {exe_name} executable'
    if len(files) == num_installers:
        raise SystemExit('No static binaries found')

    files[f'build/kitty-{version}.tar.xz'] = 'Source code'
    files[f'build/kitty-{version}.tar.xz.sig'] = 'Source code GPG signature'
    # Sign everything that needs a signature and include those too.
    for artifact, desc in signatures.items():
        sign_file(artifact)
        files[f'{artifact}.sig'] = desc
    for artifact in files:
        if not os.path.exists(artifact):
            raise SystemExit(f'The release artifact {artifact} does not exist')
    return files
|
2021-09-16 19:43:01 +03:00
|
|
|
|
|
|
|
|
|
|
|
def run_upload(args: Any) -> None:
    """Upload the release artifacts for the current version to GitHub."""
    credentials = get_github_data()
    artifacts = files_for_upload()
    uploader = GitHub(artifacts, appname, version, credentials['username'], credentials['password'])
    uploader()
|
|
|
|
|
|
|
|
|
2021-09-16 19:43:01 +03:00
|
|
|
def run_upload_nightly(args: Any) -> None:
    """Force-move the nightly tag, then upload artifacts to the nightly release."""
    for git_cmd in (
        ['git', 'tag', '-f', 'nightly'],
        ['git', 'push', 'origin', 'nightly', '-f'],
    ):
        subprocess.check_call(git_cmd)
    credentials = get_github_data()
    artifacts = files_for_upload()
    uploader = GitHub(artifacts, appname, 'nightly', credentials['username'], credentials['password'])
    uploader()
|
|
|
|
|
|
|
|
|
|
|
|
def current_branch() -> str:
    """Return the name of the currently checked out git branch."""
    out = subprocess.check_output(['git', 'symbolic-ref', '--short', 'HEAD'])
    return out.decode('utf-8').strip()
|
|
|
|
|
|
|
|
|
2020-03-14 12:07:11 +03:00
|
|
|
def require_git_master(branch: str = 'master') -> None:
    """Abort unless *branch* is the currently checked out git branch."""
    if current_branch() != branch:
        raise SystemExit(f'You must be in the {branch} git branch')
|
2017-12-01 07:58:43 +03:00
|
|
|
|
|
|
|
|
2021-09-16 19:43:01 +03:00
|
|
|
def safe_read(path: str) -> str:
    """Return the contents of *path*, or '' when the file does not exist."""
    try:
        with open(path) as f:
            return f.read()
    except FileNotFoundError:
        return ''
|
|
|
|
|
|
|
|
|
|
|
|
@contextmanager
def change_to_git_master() -> Generator[None, None, None]:
    """Temporarily stash local changes and work on the master branch.

    On exit, switches back to the original branch (rebuilding each time a
    switch happens) and pops the stash only if something was stashed.
    """
    # Record the stash ref so we can tell whether `git stash` below actually
    # created a new stash entry.
    stash_ref_before = safe_read('.git/refs/stash')
    subprocess.check_call(['git', 'stash'])
    try:
        branch_before = current_branch()
        if branch_before != 'master':
            subprocess.check_call(['git', 'switch', 'master'])
        # Rebuild after the (possible) branch switch.
        subprocess.check_call(['make', 'debug'])
        try:
            yield
        finally:
            if branch_before != 'master':
                subprocess.check_call(['git', 'switch', branch_before])
                subprocess.check_call(['make', 'debug'])
    finally:
        # Only pop if our stash call changed the stash ref.
        if stash_ref_before != safe_read('.git/refs/stash'):
            subprocess.check_call(['git', 'stash', 'pop'])
|
|
|
|
|
|
|
|
|
2020-03-14 12:07:11 +03:00
|
|
|
def require_penv() -> None:
    """Abort unless the PENV environment variable is set."""
    if os.environ.get('PENV') is None:
        raise SystemExit('The PENV env var is not present, required for uploading releases')
|
|
|
|
|
|
|
|
|
2021-09-16 19:43:01 +03:00
|
|
|
def exec_actions(actions: Iterable[str], args: Any) -> None:
    """Run run_<action>(args) for each action, restoring the cwd after each."""
    for name in actions:
        print('Running', name)
        saved_cwd = os.getcwd()
        # Actions are dispatched by naming convention to module-level functions.
        globals()[f'run_{name}'](args)
        os.chdir(saved_cwd)
|
|
|
|
|
|
|
|
|
2020-03-14 12:07:11 +03:00
|
|
|
def main() -> None:
    """Parse command line arguments and run the selected publishing actions."""
    global building_nightly
    parser = argparse.ArgumentParser(description='Publish kitty')
    parser.add_argument(
        '--only',
        default=False,
        action='store_true',
        help='Only run the specified action, by default the specified action and all sub-sequent actions are run')
    parser.add_argument(
        '--nightly',
        default=False,
        action='store_true',
        help='Upload a nightly release, ignores all other arguments')
    parser.add_argument(
        'action',
        default='all',
        nargs='?',
        choices=list(ALL_ACTIONS) + ['all', 'upload_nightly'],
        help='The action to start with')
    args = parser.parse_args()
    require_penv()
    if args.nightly:
        # Nightly builds run on a clean master checkout with their own
        # action list and skip the confirmation prompt entirely.
        with change_to_git_master():
            building_nightly = True
            exec_actions(NIGHTLY_ACTIONS, args)
        return
    require_git_master()
    if args.action == 'all':
        actions = list(ALL_ACTIONS)
    elif args.action == 'upload_nightly':
        actions = ['upload_nightly']
    else:
        # Run the chosen action and everything after it in ALL_ACTIONS.
        idx = ALL_ACTIONS.index(args.action)
        actions = ALL_ACTIONS[idx:]
    if args.only:
        del actions[1:]
    else:
        # Full release runs require interactive confirmation.
        try:
            ans = input(f'Publish version \033[91m{version}\033[m (y/n): ')
        except KeyboardInterrupt:
            ans = 'n'
        if ans.lower() != 'y':
            return
    # The website is built from the html docs, so make sure they exist.
    if actions == ['website']:
        actions.insert(0, 'html')
    exec_actions(actions, args)
|
2017-02-02 13:49:05 +03:00
|
|
|
|
|
|
|
|
|
|
|
# Script entry point.
if __name__ == '__main__':
    main()
|