Nixify the build (#55)

This makes our build reproducible and consistent.

* Disables the dhall tests; they don't build in sandbox mode because they require network access.
* Moves all scripts to the ./scripts folder for consistency.
* Moves all nix files to the ./nix folder for consistency, except for release.nix, which Hydra needs.
* Changes convert.py to take the K8s swagger spec as an argument.
* Gets the k8s swagger spec from nixpkgs.
* Gets the dhall prelude from nixpkgs.
* Adds a ./scripts/update-nixpkgs.sh script to make it easy to update to a new version of nixpkgs (see the sketch below).
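
For reference, the regeneration workflow after this change looks roughly like this (a sketch based on the scripts and Nix files added in this PR):

```bash
./scripts/update-nixpkgs.sh       # re-pin nixpkgs in ./nix/nixpkgs.json (and with it the kubernetes version)
./scripts/generate.sh             # build the Nix derivation and copy types/, default/ and README.md back into the repo
nix build --file ./release.nix    # run the same checks Hydra runs in CI
```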
Arian van Putten 2019-03-30 11:05:29 +01:00 committed by GitHub
parent f860c4e2a4
commit 8d295ecedc
14 changed files with 180 additions and 154 deletions

Makefile (new file, +15)

@@ -0,0 +1,15 @@
.PHONY: install build check default
default: build
README.md: docs/README.md.dhall
	./scripts/build-readme.sh
build: README.md
	mkdir -p types default
	./scripts/convert.py "${OPENAPI_SPEC}"
check: build
	LC_ALL=en_US.UTF-8 ./scripts/check-source.py
	mkdir -p tmp
	LC_ALL=en_US.UTF-8 ./scripts/build-examples.py tmp
install: build
	cp -r types default "${out}"
	cp README.md "${out}"
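
The Makefile is normally driven from inside the Nix derivation below, which supplies `OPENAPI_SPEC` (and `out` for the `install` target). A rough sketch of running it by hand, with an illustrative spec path:

```bash
# Outside of the Nix build you have to supply OPENAPI_SPEC yourself (illustrative path).
export OPENAPI_SPEC=/path/to/kubernetes/api/openapi-spec/swagger.json
make build    # regenerates types/, default/ and README.md
make check    # type-checks the generated sources and builds the examples into tmp/
```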


@@ -290,6 +290,15 @@ metadata:
## Development
### Updating the nixpkgs snapshot (and kubernetes version)
Run
```bash
./scripts/update-nixpkgs.sh
./scripts/generate.sh
```
If the tests fail, roll back. If they pass, you have successfully upgraded!
### Tests
All tests are defined in `release.nix`. We run these tests in CI in a [Hydra
@@ -301,12 +310,15 @@ You can run the tests locally with the following command:
nix build --file ./release.nix
```
### Changing the README
### Generating `types`, `default`, and `README.md`
Running `scripts/generate.sh` will generate all dhall files from the kubernetes
swagger specification, and copy them to `types` and `default`. It will also
generate `README.md` from `docs/README.md.dhall`.
If you make changes to `scripts/convert.py` or `docs/README.md.dhall`, you need
to run this command afterwards.
We build `README.md` from `docs/README.md.dhall` and check it into source control.
The build script `./scripts/build-readme.sh` inlines source code from the
`examples` directory. If you make changes to the readme or the examples, you need
to run `scripts/build-readme.sh`.
## Projects Using `dhall-kubernetes`


@@ -123,6 +123,15 @@ ${../examples/out/ingressRaw.yaml as Text}
## Development
### Updating the nixpkgs snapshot (and kubernetes version)
Run
```bash
./scripts/update-nixpkgs.sh
./scripts/generate.sh
```
If the tests fail, roll back. If they pass, you have successfully upgraded!
### Tests
All tests are defined in `release.nix`. We run these tests in CI in a [Hydra
@@ -134,12 +143,15 @@ You can run the tests locally with the following command:
nix build --file ./release.nix
```
### Changing the README
### Generating `types`, `default`, and `README.md`
Running `scripts/generate.sh` will generate all dhall files from the kubernetes
swagger specification, and copy them to `types` and `default`. It will also
generate `README.md` from `docs/README.md.dhall`.
If you make changes to `scripts/convert.py` or `docs/README.md.dhall`, you need
to run this command afterwards.
We build `README.md` from `docs/README.md.dhall` and check it into source control.
The build script `./scripts/build-readme.sh` inlines source code from the
`examples` directory. If you make changes to the readme or the examples, you need
to run `scripts/build-readme.sh`.
## Projects Using `dhall-kubernetes`


@@ -15,6 +15,7 @@ mkDerivation {
  sha256 = "9b22cc6f7694ef2f5d5d6fa66727044622b9905b2a9da0cdf376c75ad3b9df0e";
  isLibrary = true;
  isExecutable = true;
  doCheck = false;
  libraryHaskellDepends = [
    aeson aeson-pretty ansi-terminal base bytestring case-insensitive
    cborg cborg-json containers contravariant cryptonite Diff directory

nix/dhall-kubernetes.nix (new file, +21)

@@ -0,0 +1,21 @@
{lib, stdenv, dhall, dhall-json, dhall-text, dhallPackages, kubernetes-openapi-spec, python3, glibcLocales}:
let
  # Ignore generated files
  ignoreOutputs = name: type: !(lib.elem name (map toString [../README.md ../types ../default]));
in
stdenv.mkDerivation {
  name = "dhall-kubernetes";
  DHALL_PRELUDE = "${dhallPackages.prelude}/package.dhall";
  OPENAPI_SPEC = "${kubernetes-openapi-spec}";
  doCheck = true;
  buildInputs = [ dhall dhall-json dhall-text python3 glibcLocales ];
  preBuild = ''
    patchShebangs ./scripts/build-readme.sh
    patchShebangs ./scripts/convert.py
  '';
  preCheck = ''
    patchShebangs ./scripts/build-examples.py
    patchShebangs ./scripts/check-source.py
  '';
  src = lib.cleanSourceWith {filter = ignoreOutputs; src = lib.cleanSource ./..;};
}
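
This derivation is exposed via the `packageOverrides` in `./nix/nixpkgs.nix` below, so it can be built on its own, for example with the same invocation that `scripts/generate.sh` uses:

```bash
nix-build release.nix -A dhall-kubernetes --no-out-link
```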


@@ -0,0 +1,9 @@
{stdenv, kubernetes}:
stdenv.mkDerivation {
  name = "kubernetes-openapi-spec";
  src = kubernetes.src;
  phases = [ "unpackPhase" "installPhase" ];
  installPhase = ''
    cp api/openapi-spec/swagger.json $out
  '';
}
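
To inspect the extracted spec on its own, something like the following should work (a sketch; the `kubernetes-openapi-spec` attribute comes from the `packageOverrides` in `./nix/nixpkgs.nix`):

```bash
nix-build -E '(import ./nix/nixpkgs.nix).kubernetes-openapi-spec' --no-out-link
```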

nix/nixpkgs.json (new file, +7)

@@ -0,0 +1,7 @@
{
  "url": "https://github.com/nixos/nixpkgs-channels.git",
  "rev": "07b42ccf2de451342982b550657636d891c4ba35",
  "date": "2019-03-22T11:15:26+02:00",
  "sha256": "1a7ga18pwq0y4p9r787622ry8gssw1p6wsr5l7dl8pqnj1hbbzwh",
  "fetchSubmodules": false
}

nix/nixpkgs.nix (new file, +24)

@@ -0,0 +1,24 @@
let
  spec = builtins.fromJSON (builtins.readFile ./nixpkgs.json);
  nixpkgs = builtins.fetchGit {
    inherit (spec) url rev;
    ref = "nixpkgs-unstable";
  };
  config = {
    packageOverrides = pkgs: rec {
      dhall-kubernetes = pkgs.callPackage ./dhall-kubernetes.nix {};
      kubernetes-openapi-spec = pkgs.callPackage ./kubernetes-openapi-spec.nix {};
      haskellPackages = pkgs.haskellPackages.override {
        overrides = haskellPackagesNew: haskellPackagesOld: rec {
          dhall = haskellPackagesNew.callPackage ./dhall-1.21.0.nix {};
          dhall-json = haskellPackagesNew.callPackage ./dhall-json-1.2.7.nix {};
          dhall-text = haskellPackagesNew.callPackage ./dhall-text-1.0.16.nix {};
          megaparsec = haskellPackagesNew.callPackage ./megaparsec-7.0.2.nix {};
          repline = haskellPackagesNew.callPackage ./repline-0.2.0.0.nix {};
        };
      };
    };
  };
in
import nixpkgs { inherit config; }


@@ -1,24 +0,0 @@
let
  rev = "19013d809297cb9dbba69bda24e52a2833f4e05a";
  outputSha256 = "148nqqyb39xmxlnw4vgqin2s7ywq51yi64d2hqmd6pk2gqnhmpv9";
  nixpkgs = builtins.fetchTarball {
    url = "https://github.com/NixOS/nixpkgs/archive/${rev}.tar.gz";
    sha256 = outputSha256;
  };
  config = {
    packageOverrides = pkgs: rec {
      haskellPackages = pkgs.haskellPackages.override {
        overrides = haskellPackagesNew: haskellPackagesOld: rec {
          dhall = haskellPackagesNew.callPackage ./nix/dhall-1.21.0.nix {};
          dhall-json = haskellPackagesNew.callPackage ./nix/dhall-json-1.2.7.nix {};
          dhall-text = haskellPackagesNew.callPackage ./nix/dhall-text-1.0.16.nix {};
          megaparsec = haskellPackagesNew.callPackage ./nix/megaparsec-7.0.2.nix {};
          repline = haskellPackagesNew.callPackage ./nix/repline-0.2.0.0.nix {};
        };
      };
    };
  };
in
import nixpkgs { inherit config; }


@@ -1,65 +1,17 @@
{ src ? { rev = ""; }, ... }:
let
pkgs = import ./nixpkgs.nix;
nativeBuildInputs = [
pkgs.git
pkgs.python3
pkgs.bash
pkgs.dhall
pkgs.dhall-json
pkgs.dhall-text
pkgs.glibcLocales
];
runCommand = name: script:
pkgs.runCommand name { inherit nativeBuildInputs; } script;
generatedFilesCompile =
runCommand
"generated-files-compile"
''
cd ${./.}
LC_ALL=en_US.UTF-8 ./scripts/check-source.py
touch $out
'';
buildReadme =
runCommand
"build-readme"
''
cd ${./.}
./scripts/build-readme.sh $out
diff --unified --color=always README.md $out
'';
validateExamples =
runCommand
"validate-examples"
''
cd ${./.}
mkdir $out
LC_ALL=en_US.UTF-8 ./scripts/build-examples.py $out
'';
{ pkgs ? import ./nix/nixpkgs.nix
, src ? { rev = ""; }
, ...
}:
rec {
inherit (pkgs) dhall-kubernetes;
# Derivation that trivially depends on the input source code revision.
# As this is included in the "dhall-lang" aggregate, it forces every
# commit to have a corresponding GitHub status check, even if the
# commit doesn't make any changes (which can happen when merging
# master in).
rev = pkgs.runCommand "rev" {} ''echo "${src.rev}" > $out'';
in {
dhall-kubernetes = pkgs.releaseTools.aggregate {
name = "dhall-kubernetes";
constituents = [
generatedFilesCompile
validateExamples
buildReadme
rev
];
aggregate = pkgs.releaseTools.aggregate {
name = "dhall-kubernetes-agggregate";
constituents = [ dhall-kubernetes rev ];
};
}
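
Locally you can build either the package itself or the same aggregate Hydra builds in CI; roughly:

```bash
nix build --file ./release.nix dhall-kubernetes   # just the generated package
nix build --file ./release.nix aggregate          # dhall-kubernetes plus the rev check, as in CI
```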


@@ -1,12 +1,8 @@
#!/usr/bin/env python3
import requests
#! /usr/bin/env python3
import json
import re
import sys
kubernetes_tag = 'v1.13.4'
url = \
'https://raw.githubusercontent.com/kubernetes/kubernetes/{tag}/api/openapi-spec/swagger.json' \
.format(tag=kubernetes_tag)
# See https://kubernetes.io/docs/concepts/overview/working-with-objects/kubernetes-objects/#required-fields
# because k8s API allows PUTS etc with partial data, it's not clear from the data types OR the API which
@@ -132,53 +128,53 @@ def labelize(propName):
def main():
spec = requests.get(url).json()
for modelName, modelSpec in spec['definitions'].items():
with open('types/' + modelName + '.dhall', 'w') as f:
f.write('{}\n'.format(build_type(modelSpec, '.', modelName)))
with open('default/' + modelName + '.dhall', 'w') as f:
if 'type' in modelSpec:
typ = build_type(modelSpec, '../types')
# In case we have a union, we make the constructors for it
if typ[0] == '<':
f.write('{}\n'.format(typ))
# Otherwise we just output the identity
with open(sys.argv[1]) as specf:
spec = json.load(specf)
for modelName, modelSpec in spec['definitions'].items():
with open('types/' + modelName + '.dhall', 'w') as f:
f.write('{}\n'.format(build_type(modelSpec, '.', modelName)))
with open('default/' + modelName + '.dhall', 'w') as f:
if 'type' in modelSpec:
typ = build_type(modelSpec, '../types')
# In case we have a union, we make the constructors for it
if typ[0] == '<':
f.write('{}\n'.format(typ))
# Otherwise we just output the identity
else:
f.write('\(a : {}) -> a\n'.format(typ))
elif '$ref' in modelSpec:
path = schema_path_from_ref('.', modelSpec['$ref'])
f.write('{}\n'.format(path))
else:
f.write('\(a : {}) -> a\n'.format(typ))
elif '$ref' in modelSpec:
path = schema_path_from_ref('.', modelSpec['$ref'])
f.write('{}\n'.format(path))
else:
required = required_properties(modelName, modelSpec)
if modelName in required_for.keys():
required |= required_for[modelName]
required = required_properties(modelName, modelSpec)
if modelName in required_for.keys():
required |= required_for[modelName]
properties = modelSpec.get('properties', {})
properties = modelSpec.get('properties', {})
resource_data = get_static_data(modelSpec)
param_names = required - set(resource_data.keys())
resource_data = get_static_data(modelSpec)
param_names = required - set(resource_data.keys())
# If there's multiple required props, we make it a lambda
requiredProps = [k for k in properties if k in required]
if len(requiredProps) > 0:
params = ['{} : ({})'.format(labelize(propName), build_type(propVal, '../types'))
for propName, propVal in properties.items()
if propName in param_names]
f.write('\(_params : {' + ', '.join(params) + '}) ->\n')
# If there's multiple required props, we make it a lambda
requiredProps = [k for k in properties if k in required]
if len(requiredProps) > 0:
params = ['{} : ({})'.format(labelize(propName), build_type(propVal, '../types'))
for propName, propVal in properties.items()
if propName in param_names]
f.write('\(_params : {' + ', '.join(params) + '}) ->\n')
# If it's required we're passing it in as a parameter
KVs = [(propName, "_params." + propName)
if propName in param_names
else (propName, get_default(propDef, propName in required, resource_data.get(propName, None)))
for propName, propDef in properties.items()]
# If it's required we're passing it in as a parameter
KVs = [(propName, "_params." + propName)
if propName in param_names
else (propName, get_default(propDef, propName in required, resource_data.get(propName, None)))
for propName, propDef in properties.items()]
# If there's no fields, should be an empty record
if len(KVs) > 0:
formatted = [" {} = {}\n".format(labelize(k), v) for k, v in KVs]
else:
formatted = '='
f.write('{' + ','.join(formatted) + '} : ../types/' + modelName + '.dhall\n')
# If there's no fields, should be an empty record
if len(KVs) > 0:
formatted = [" {} = {}\n".format(labelize(k), v) for k, v in KVs]
else:
formatted = '='
f.write('{' + ','.join(formatted) + '} : ../types/' + modelName + '.dhall\n')
if __name__ == '__main__':
main()
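
With this change `convert.py` no longer fetches the spec over the network; the caller passes its path, as the new Makefile does via `"${OPENAPI_SPEC}"`. A hand-run sketch with an illustrative path:

```bash
mkdir -p types default
./scripts/convert.py /path/to/swagger.json   # illustrative path; the Nix build passes the pinned spec instead
```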

scripts/generate.sh (new executable file, +12)

@@ -0,0 +1,12 @@
#!/bin/sh
rm -rf ./default ./types
if dir=$(nix-build release.nix -A dhall-kubernetes --no-out-link); then
  cp -r "$dir"/default .
  chmod u+w ./default
  cp -r "$dir"/types .
  chmod u+w ./types
  cp "$dir/README.md" README.md
  chmod u+w ./README.md
fi

scripts/update-nixpkgs.sh (new executable file, +4)

@@ -0,0 +1,4 @@
#!/usr/bin/env nix-shell
#!nix-shell -i bash -p nix-prefetch-git
nix-prefetch-git https://github.com/nixos/nixpkgs-channels.git --rev refs/heads/nixos-unstable > ./nix/nixpkgs.json


@@ -1,15 +0,0 @@
let
  pkgs = import ./nixpkgs.nix;
in
pkgs.stdenv.mkDerivation {
  name = "dhall-kubernetes-shell";
  buildInputs = [
    pkgs.git
    pkgs.dhall
    pkgs.dhall-json
    pkgs.dhall-text
    pkgs.python3
    pkgs.python3Packages.requests
    pkgs.glibcLocales
  ];
}