Merge pull request #81164 from Mic92/home-assistant

nixos/home-assistant: 0.104.3 -> 0.106.0
Jörg Thalheim 2020-03-02 10:55:35 +00:00 committed by GitHub
commit 2c5ffb5c7a
11 changed files with 1135 additions and 982 deletions


@@ -96,7 +96,20 @@ in {
config = mkOption {
default = null;
type = with types; nullOr attrs;
# Migrate to new option types later: https://github.com/NixOS/nixpkgs/pull/75584
type = with lib.types; let
valueType = nullOr (oneOf [
bool
int
float
str
(lazyAttrsOf valueType)
(listOf valueType)
]) // {
description = "Yaml value";
emptyValue.value = {};
};
in valueType;
example = literalExample ''
{
homeassistant = {
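
For context, the recursive valueType above validates YAML-style values at every nesting level (booleans, integers, floats, strings, lists, and attribute sets), where the previous nullOr attrs only checked that the top level was an attribute set. A minimal sketch of a configuration the new type accepts; the option path services.home-assistant.config matches the module this hunk patches, but the values themselves are illustrative and not taken from this diff:

{
  services.home-assistant.config = {
    homeassistant = {
      name = "Home";                 # str
      latitude = 52.52;              # float
      unit_system = "metric";
    };
    http.server_port = 8123;         # int
    sensor = [                       # list of nested attrsets
      { platform = "random"; minimum = 0; maximum = 10; }
    ];
  };
}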


@@ -2,13 +2,13 @@
buildPythonPackage rec {
pname = "hass-nabucasa";
version = "0.29";
version = "0.31";
src = fetchFromGitHub {
owner = "nabucasa";
repo = pname;
rev = version;
sha256 = "182nh5i3hlj0kqkbynk69md0ddq83w02l8lz4m03d8xbjixzi1k1";
sha256 = "0hxdvdj41gq5ryafjhrcgf6y8l33lyf45a1vgwwbk0q29sir9bnr";
};
# upstreamed in https://github.com/NabuCasa/hass-nabucasa/pull/119


@@ -0,0 +1,21 @@
{ lib
, buildPythonPackage
, fetchPypi
}:
buildPythonPackage rec {
pname = "jsonpath";
version = "0.82";
src = fetchPypi {
inherit pname version;
sha256 = "46d3fd2016cd5b842283d547877a02c418a0fe9aa7a6b0ae344115a2c990fef4";
};
meta = with lib; {
description = "An XPath for JSON";
homepage = "https://github.com/json-path/JsonPath";
license = licenses.mit;
maintainers = [ maintainers.mic92 ];
};
}


@@ -0,0 +1,38 @@
{ lib
, buildPythonPackage
, fetchPypi
, requests
, six
, tox
, pytest
, pythonOlder
}:
buildPythonPackage rec {
pname = "pushover-complete";
version = "1.1.1";
src = fetchPypi {
pname = "pushover_complete";
inherit version;
sha256 = "8a8f867e1f27762a28a0832c33c6003ca54ee04c935678d124b4c071f7cf5a1f";
};
propagatedBuildInputs = [
requests
six
];
checkInputs = [ pytest tox ];
# Fails also on their travis right now:
# - https://travis-ci.org/scolby33/pushover_complete/builds?utm_medium=notification&utm_source=github_status
doCheck = pythonOlder "3.7";
meta = with lib; {
description = "A Python package for interacting with *all* aspects of the Pushover API";
homepage = https://github.com/scolby33/pushover_complete;
license = licenses.mit;
maintainers = [ maintainers.mic92 ];
};
}


@@ -10,15 +10,16 @@
, certifi
, bitstring
, unittest2
, future
}:
buildPythonPackage rec {
pname = "pyicloud";
version = "0.9.1";
version = "0.9.2";
src = fetchPypi {
inherit pname version;
sha256 = "580b52e95f67a41ed86c56a514aa2b362f53fbaf23f16c69fb24e0d19fd373ee";
sha256 = "1jjkzf7vwms6pymnmdr893830vrymxnq455xnqp21wqhjjiy2amd";
};
propagatedBuildInputs = [
@@ -30,6 +31,7 @@ buildPythonPackage rec {
tzlocal
certifi
bitstring
future
];
checkInputs = [ unittest2 ];

File diff suppressed because it is too large.


@@ -67,7 +67,7 @@ let
extraBuildInputs = extraPackages py.pkgs;
# Don't forget to run parse-requirements.py after updating
hassVersion = "0.104.3";
hassVersion = "0.106.1";
in with py.pkgs; buildPythonApplication rec {
pname = "homeassistant";
@@ -75,6 +75,8 @@ in with py.pkgs; buildPythonApplication rec {
disabled = pythonOlder "3.5";
patches = [ ./relax-importlib-metadata-pyaml.patch ];
inherit availableComponents;
# PyPI tarball is missing tests/ directory
@@ -82,7 +84,7 @@ in with py.pkgs; buildPythonApplication rec {
owner = "home-assistant";
repo = "home-assistant";
rev = version;
sha256 = "06bh9qrpa1d370pvw6in0isg3yw4p7gh9rpy4hm96p0mf53vxfdp";
sha256 = "0i261hzjfhqnq7j8dwsnj2h2vmr4vnxkvk2ff910am1knzni3a2z";
};
propagatedBuildInputs = [
@@ -95,7 +97,8 @@ in with py.pkgs; buildPythonApplication rec {
] ++ componentBuildInputs ++ extraBuildInputs;
checkInputs = [
asynctest pytest pytest-aiohttp requests-mock pydispatcher aiohue netdisco hass-nabucasa
asynctest pytest pytest-aiohttp requests-mock pydispatcher aiohue netdisco
hass-nabucasa defusedxml
];
postPatch = ''
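
The extraBuildInputs line above is where user-supplied Python packages enter the build; together with componentBuildInputs further down it is what downstream overrides feed. A rough sketch of such an override, assuming the extraPackages and extraComponents arguments this expression takes (the argument list itself is outside this hunk):

home-assistant.override {
  # additional Python libraries, e.g. for a PostgreSQL recorder backend
  extraPackages = ps: with ps; [ psycopg2 ];
  # components whose dependencies should be baked in, resolved via component-packages.nix
  extraComponents = [ "mqtt" ];
}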


@@ -4,11 +4,11 @@ buildPythonPackage rec {
# the frontend version corresponding to a specific home-assistant version can be found here
# https://github.com/home-assistant/home-assistant/blob/master/homeassistant/components/frontend/manifest.json
pname = "home-assistant-frontend";
version = "20200108.0";
version = "20200220.4";
src = fetchPypi {
inherit pname version;
sha256 = "1h6fgkx8fffzs829893gjbh0wbjgxjzz2ca64v8r5sb938bfayg8";
sha256 = "0cb8b6xizxz1q5r0qgwsqs53va9bxdqnp4vwf5lh8ppv9zy7hssc";
};
# no Python tests implemented


@@ -1,5 +1,5 @@
#! /usr/bin/env nix-shell
#! nix-shell -i python3 -p "python3.withPackages (ps: with ps; [ attrs ])"
#! nix-shell -i python3 -p "python3.withPackages (ps: with ps; [ mypy attrs ])"
#
# This script downloads Home Assistant's source tarball.
# Inside the homeassistant/components directory, each integration has an associated manifest.json,
@@ -15,125 +15,169 @@
# nixpkgs' python3Packages are searched for appropriate names.
# Then, a Nix attribute set mapping integration name to dependencies is created.
from io import BytesIO
import json
import pathlib
import os
import pathlib
import re
import subprocess
import sys
import tempfile
import tarfile
import tempfile
from io import BytesIO
from typing import Dict, Optional
from urllib.request import urlopen
COMPONENT_PREFIX = 'homeassistant.components'
PKG_SET = 'python3Packages'
COMPONENT_PREFIX = "homeassistant.components"
PKG_SET = "python3Packages"
# If some requirements are matched by multiple python packages,
# the following can be used to choose one of them
PKG_PREFERENCES = {
# Use python3Packages.youtube-dl-light instead of python3Packages.youtube-dl
'youtube-dl': 'youtube-dl-light',
'tensorflow-bin': 'tensorflow',
'tensorflowWithoutCuda': 'tensorflow'
"youtube-dl": "youtube-dl-light",
"tensorflow-bin": "tensorflow",
"tensorflowWithoutCuda": "tensorflow",
}
def run_mypy() -> None:
cmd = ["mypy", "--ignore-missing-imports", __file__]
print(f"$ {' '.join(cmd)}")
subprocess.run(cmd, check=True)
def get_version():
with open(os.path.dirname(sys.argv[0]) + '/default.nix') as f:
with open(os.path.dirname(sys.argv[0]) + "/default.nix") as f:
# A version consists of digits, dots, and possibly a "b" (for beta)
m = re.search('hassVersion = "([\\d\\.b]+)";', f.read())
return m.group(1)
def parse_components(version='master'):
def parse_components(version="master"):
components = {}
with tempfile.TemporaryDirectory() as tmp:
with urlopen(f'https://github.com/home-assistant/home-assistant/archive/{version}.tar.gz') as response:
with urlopen(
f"https://github.com/home-assistant/home-assistant/archive/{version}.tar.gz"
) as response:
tarfile.open(fileobj=BytesIO(response.read())).extractall(tmp)
# Use part of a script from the Home Assistant codebase
sys.path.append(os.path.join(tmp, f'home-assistant-{version}'))
sys.path.append(os.path.join(tmp, f"home-assistant-{version}"))
from script.hassfest.model import Integration
integrations = Integration.load_dir(pathlib.Path(
os.path.join(tmp, f'home-assistant-{version}', 'homeassistant/components')
))
integrations = Integration.load_dir(
pathlib.Path(
os.path.join(
tmp, f"home-assistant-{version}", "homeassistant/components"
)
)
)
for domain in sorted(integrations):
integration = integrations[domain]
components[domain] = integration.manifest
return components
# Recursively get the requirements of a component and its dependencies
def get_reqs(components, component):
requirements = set(components[component]['requirements'])
for dependency in components[component]['dependencies']:
requirements = set(components[component]["requirements"])
for dependency in components[component]["dependencies"]:
requirements.update(get_reqs(components, dependency))
return requirements
# Store a JSON dump of Nixpkgs' python3Packages
output = subprocess.check_output(['nix-env', '-f', os.path.dirname(sys.argv[0]) + '/../../..', '-qa', '-A', PKG_SET, '--json'])
packages = json.loads(output)
def name_to_attr_path(req):
def dump_packages() -> Dict[str, Dict[str, str]]:
# Store a JSON dump of Nixpkgs' python3Packages
output = subprocess.check_output(
[
"nix-env",
"-f",
os.path.dirname(sys.argv[0]) + "/../../..",
"-qa",
"-A",
PKG_SET,
"--json",
]
)
return json.loads(output)
def name_to_attr_path(req: str, packages: Dict[str, Dict[str, str]]) -> Optional[str]:
attr_paths = set()
names = [req]
# E.g. python-mpd2 is actually called python3.6-mpd2
# instead of python-3.6-python-mpd2 inside Nixpkgs
if req.startswith('python-') or req.startswith('python_'):
names.append(req[len('python-'):])
if req.startswith("python-") or req.startswith("python_"):
names.append(req[len("python-") :])
for name in names:
# treat "-" and "_" equally
name = re.sub('[-_]', '[-_]', name)
pattern = re.compile('^python\\d\\.\\d-{}-\\d'.format(name), re.I)
name = re.sub("[-_]", "[-_]", name)
pattern = re.compile("^python\\d\\.\\d-{}-\\d".format(name), re.I)
for attr_path, package in packages.items():
if pattern.match(package['name']):
if pattern.match(package["name"]):
attr_paths.add(attr_path)
if len(attr_paths) > 1:
for to_replace, replacement in PKG_PREFERENCES.items():
try:
attr_paths.remove(PKG_SET + '.' + to_replace)
attr_paths.add(PKG_SET + '.' + replacement)
attr_paths.remove(PKG_SET + "." + to_replace)
attr_paths.add(PKG_SET + "." + replacement)
except KeyError:
pass
# Let's hope there's only one derivation with a matching name
assert len(attr_paths) <= 1, "{} matches more than one derivation: {}".format(req, attr_paths)
assert len(attr_paths) <= 1, "{} matches more than one derivation: {}".format(
req, attr_paths
)
if len(attr_paths) == 1:
return attr_paths.pop()
else:
return None
version = get_version()
print('Generating component-packages.nix for version {}'.format(version))
components = parse_components(version=version)
build_inputs = {}
for component in sorted(components.keys()):
attr_paths = []
missing_reqs = []
reqs = sorted(get_reqs(components, component))
for req in reqs:
# Some requirements are specified by url, e.g. https://example.org/foobar#xyz==1.0.0
# Therefore, if there's a "#" in the line, only take the part after it
req = req[req.find('#') + 1:]
name = req.split('==')[0]
attr_path = name_to_attr_path(name)
if attr_path is not None:
# Add attribute path without "python3Packages." prefix
attr_paths.append(attr_path[len(PKG_SET + '.'):])
else:
missing_reqs.append(name)
else:
build_inputs[component] = attr_paths
n_diff = len(reqs) > len(build_inputs[component])
if n_diff > 0:
print("Component {} is missing {} dependencies".format(component, n_diff))
print("missing requirements: {}".format(missing_reqs))
with open(os.path.dirname(sys.argv[0]) + '/component-packages.nix', 'w') as f:
f.write('# Generated by parse-requirements.py\n')
f.write('# Do not edit!\n\n')
f.write('{\n')
f.write(' version = "{}";\n'.format(version))
f.write(' components = {\n')
for component, attr_paths in build_inputs.items():
f.write(' "{}" = ps: with ps; [ '.format(component))
f.write(' '.join(attr_paths))
f.write(' ];\n')
f.write(' };\n')
f.write('}\n')
def main() -> None:
packages = dump_packages()
version = get_version()
print("Generating component-packages.nix for version {}".format(version))
components = parse_components(version=version)
build_inputs = {}
for component in sorted(components.keys()):
attr_paths = []
missing_reqs = []
reqs = sorted(get_reqs(components, component))
for req in reqs:
# Some requirements are specified by url, e.g. https://example.org/foobar#xyz==1.0.0
# Therefore, if there's a "#" in the line, only take the part after it
req = req[req.find("#") + 1 :]
name = req.split("==")[0]
attr_path = name_to_attr_path(name, packages)
if attr_path is not None:
# Add attribute path without "python3Packages." prefix
attr_paths.append(attr_path[len(PKG_SET + ".") :])
else:
missing_reqs.append(name)
else:
build_inputs[component] = (attr_paths, missing_reqs)
n_diff = len(reqs) > len(build_inputs[component])
if n_diff > 0:
print("Component {} is missing {} dependencies".format(component, n_diff))
print("missing requirements: {}".format(missing_reqs))
with open(os.path.dirname(sys.argv[0]) + "/component-packages.nix", "w") as f:
f.write("# Generated by parse-requirements.py\n")
f.write("# Do not edit!\n\n")
f.write("{\n")
f.write(f' version = "{version}";\n')
f.write(" components = {\n")
for component, deps in build_inputs.items():
available, missing = deps
f.write(f' "{component}" = ps: with ps; [ ')
f.write(" ".join(available))
f.write("];")
if len(missing) > 0:
f.write(f" # missing inputs: {' '.join(missing)}")
f.write("\n")
f.write(" };\n")
f.write("}\n")
if __name__ == "__main__":
run_mypy()
main()
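
For reference, the component-packages.nix that main() writes comes out roughly in the following shape. The component names and dependencies here are illustrative, and the trailing "# missing inputs:" comment is the new annotation this rewrite adds for requirements that could not be matched to a nixpkgs attribute:

# Generated by parse-requirements.py
# Do not edit!

{
  version = "0.106.1";
  components = {
    "mqtt" = ps: with ps; [ paho-mqtt hbmqtt];
    "met" = ps: with ps; [ ]; # missing inputs: pymetno
  };
}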


@@ -0,0 +1,22 @@
diff --git a/setup.py b/setup.py
index 7f9155d9a..f90a0d965 100755
--- a/setup.py
+++ b/setup.py
@@ -38,7 +38,7 @@ REQUIRES = [
"attrs==19.3.0",
"bcrypt==3.1.7",
"certifi>=2019.11.28",
- "importlib-metadata==1.5.0",
+ "importlib-metadata>=1.3.0",
"jinja2>=2.10.3",
"PyJWT==1.7.1",
# PyJWT has loose dependency. We want the latest one.
@@ -46,7 +46,7 @@ REQUIRES = [
"pip>=8.0.3",
"python-slugify==4.0.0",
"pytz>=2019.03",
- "pyyaml==5.3",
+ "pyyaml>=5.2",
"requests==2.22.0",
"ruamel.yaml==0.15.100",
"voluptuous==0.11.7",


@@ -807,6 +807,8 @@ in {
jira = callPackage ../development/python-modules/jira { };
jsonpath = callPackage ../development/python-modules/jsonpath { };
junit-xml = callPackage ../development/python-modules/junit-xml { };
junitparser = callPackage ../development/python-modules/junitparser { };
@@ -979,6 +981,8 @@ in {
pdfx = callPackage ../development/python-modules/pdfx { };
pushover-complete = callPackage ../development/python-modules/pushover-complete { };
pyicloud = callPackage ../development/python-modules/pyicloud { };
pyperf = callPackage ../development/python-modules/pyperf { };
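
With the callPackage entries above, the two new modules become addressable as python3Packages.jsonpath and python3Packages.pushover-complete. A quick sketch of pulling them into an environment (any Python 3 attribute works the same way):

python3.withPackages (ps: with ps; [ jsonpath pushover-complete ])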