#!/usr/bin/env nix-shell
#!nix-shell -i python3 -p "python3.withPackages(ps: with ps; [ requests pyquery click ])"

# To use, just execute this script with --help to display help.
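#
# Example invocation, assuming the script is run from the root of a nixpkgs
# checkout (it evaluates ./. to look up package metadata):
#
#   ./path/to/this/script --jobset nixos/release-19.09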

import subprocess
import json
import sys

import click
import requests
from pyquery import PyQuery as pq


def map_dict(f, d):
    for k, v in d.items():
        d[k] = f(v)
    return d


# Ask Nix for the full maintainer list and reduce it to GitHub handles.
maintainers_json = subprocess.check_output([
    'nix-instantiate', '-A', 'lib.maintainers', '--eval', '--strict', '--json'
])
maintainers = json.loads(maintainers_json)
MAINTAINERS = map_dict(lambda v: v.get('github', None), maintainers)
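# At this point MAINTAINERS maps maintainer attribute names to GitHub handles,
# or None when no handle is set, e.g. roughly {'alice': 'alice', 'bob': None}
# (illustrative entries only).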


def get_response_text(url):
    return pq(requests.get(url).text)  # IO


EVAL_FILE = {
    'nixos': 'nixos/release.nix',
    'nixpkgs': 'pkgs/top-level/release.nix',
}
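# The keys above are matched against the first component of a Hydra job name
# ("nixos.…" or "nixpkgs.…"); get_maintainers below uses them to pick which
# release expression to evaluate.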


def get_maintainers(attr_name):
    try:
        nixname = attr_name.split('.')
        meta_json = subprocess.check_output([
            'nix-instantiate',
            '--eval',
            '--strict',
            '-A',
            '.'.join(nixname[1:]) + '.meta',
            EVAL_FILE[nixname[0]],
            '--arg',
            'nixpkgs',
            './.',
            '--json'])
        meta = json.loads(meta_json)
        return meta.get('maintainers', [])
    except Exception:
        return []
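
# For illustration, for a job named "nixpkgs.hello.x86_64-linux" (a made-up
# example attribute) the check_output call above runs roughly:
#
#   nix-instantiate --eval --strict -A hello.x86_64-linux.meta \
#       pkgs/top-level/release.nix --arg nixpkgs ./. --json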


def filter_github_users(maintainers):
    """Keep only maintainer entries that have a GitHub handle."""
    github_only = []
    for i in maintainers:
        if i.get('github'):
            github_only.append(i)
    return github_only


def print_build(table_row):
    a = pq(table_row)('a')[1]
    print("- [ ] [{}]({})".format(a.text, a.get('href')), flush=True)

    job_maintainers = filter_github_users(get_maintainers(a.text))
    if job_maintainers:
        print("  - maintainers: {}".format(
            " ".join(map(lambda u: '@' + u.get('github'), job_maintainers))))
    # TODO: print last three persons that touched this file
    # TODO: pinpoint the diff that broke this build, or maybe it's transient or maybe it never worked?

    sys.stdout.flush()
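
# For one failed job, print_build emits a markdown task-list entry like the
# following (job name, build URL, and maintainers are illustrative):
#
#   - [ ] [hello.x86_64-linux](https://hydra.nixos.org/build/123456)
#     - maintainers: @alice @bob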


@click.command()
@click.option(
    '--jobset',
    default="nixos/release-19.09",
    help='Hydra project like nixos/release-19.09')
def cli(jobset):
    """
    Given a Hydra project, inspect its latest evaluation
    and print a summary of failed builds.
    """

    url = "https://hydra.nixos.org/jobset/{}".format(jobset)

    # get the last evaluation
    click.echo(click.style(
        'Getting latest evaluation for {}'.format(url), fg='green'))
    d = get_response_text(url)
    evaluations = d('#tabs-evaluations').find('a[class="row-link"]')
    latest_eval_url = evaluations[0].get('href')
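    # The rows in the jobset's "Evaluations" tab are assumed to be ordered
    # newest first, so the first row-link is taken as the latest evaluation.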

    # parse last evaluation page
    click.echo(click.style(
        'Parsing evaluation {}'.format(latest_eval_url), fg='green'))
    d = get_response_text(latest_eval_url + '?full=1')

    # TODO: aborted evaluations
    # TODO: dependency failed without propagated builds
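
    # Failed jobs are located by the alt text of their status icons on the
    # evaluation page; "Failed" and "Dependency failed" are reported separately.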

    print('\nFailures:')
    for tr in d('img[alt="Failed"]').parents('tr'):
        print_build(tr)

    print('\nDependency failures:')
    for tr in d('img[alt="Dependency failed"]').parents('tr'):
        print_build(tr)


if __name__ == "__main__":
    try:
        cli()
    except Exception:
        # Drop into the post-mortem debugger on any unhandled error.
        import pdb
        pdb.post_mortem()