build and distribute

Graham Christensen 2023-05-24 22:17:28 -04:00
parent 5efcd4d923
commit a1387522d9
8 changed files with 549 additions and 72 deletions

.github/workflows/build.yaml vendored Normal file

@@ -0,0 +1,42 @@
name: Build artifacts
on:
workflow_dispatch:
workflow_call:
jobs:
build-artifacts-X64-macOS:
runs-on: macos-12
steps:
- uses: actions/checkout@v3
- uses: DeterminateSystems/nix-installer-action-cache@main
- name: Build package
run: "nix build .# -L"
- name: Upload a Build Artifact
uses: actions/upload-artifact@v3.1.2
with:
# Artifact name
name: flake-checker-X64-macOS
path: result/bin/flake-checker
retention-days: 1
build-artifacts-X64-Linux:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v3
- uses: DeterminateSystems/nix-installer-action-cache@main
- name: Build package
run: "nix build .# -L"
- name: Upload a Build Artifact
uses: actions/upload-artifact@v3.1.2
with:
# Artifact name
name: flake-checker-X64-Linux
path: result/bin/flake-checker
retention-days: 1
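
The build job is the same nix build the repository already supports, so the CI artifact can be reproduced locally. A minimal sketch, assuming Nix with flakes enabled (the --help invocation is only illustrative):

# Build the flake's default package with full build logs, as the workflow does.
nix build .# -L
# The uploaded artifact is the binary behind the result symlink.
ls -l result/bin/flake-checker
./result/bin/flake-checker --help  # illustrative; the real CLI flags may differ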

.github/workflows/release-branches.yml vendored Normal file

@@ -0,0 +1,52 @@
name: Release Branch
on:
push:
branches:
# NOTE: make sure any branches here are also valid directory names,
# otherwise creating the directory and uploading to s3 will fail
- "main"
jobs:
build:
uses: ./.github/workflows/build.yaml
release:
needs: build
concurrency: release
runs-on: ubuntu-latest
permissions:
contents: read
id-token: write # In order to request a JWT for AWS auth
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v2
with:
role-to-assume: ${{ secrets.AWS_S3_UPLOAD_ROLE }}
aws-region: us-east-2
- name: Create the artifacts directory
run: rm -rf ./artifacts && mkdir ./artifacts
- uses: actions/download-artifact@v3
with:
name: flake-checker-X64-macOS
path: cache-binary-X64-macOS
- name: Persist the cache binary
run: cp ./cache-binary-X64-macOS/flake-checker ./artifacts/flake-checker-X64-macOS
- uses: actions/download-artifact@v3
with:
name: flake-checker-X64-Linux
path: cache-binary-X64-Linux
- name: Persist the cache binary
run: cp ./cache-binary-X64-Linux/flake-checker ./artifacts/flake-checker-X64-Linux
- name: Publish Release (Branch)
env:
AWS_BUCKET: ${{ secrets.AWS_S3_UPLOAD_BUCKET }}
run: |
.github/workflows/upload_s3.sh branch "${{ github.ref_name }}" "$GITHUB_SHA"
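
For a push to main, upload_s3.sh (added later in this commit) publishes each binary under both a branch prefix and the commit SHA. A hedged sketch of inspecting the result, assuming AWS credentials are configured and with AWS_BUCKET standing in for the AWS_S3_UPLOAD_BUCKET secret:

# Branch-scoped copy: DEST becomes "branch_main" for pushes to main.
aws s3 ls "s3://$AWS_BUCKET/branch_main/"
# Commit-scoped copy keyed by the pushed SHA.
aws s3 ls "s3://$AWS_BUCKET/$GITHUB_SHA/"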

.github/workflows/release-prs.yml vendored Normal file

@@ -0,0 +1,61 @@
name: Release PR
on:
pull_request:
types:
- opened
- reopened
- synchronize
- labeled
jobs:
build:
uses: ./.github/workflows/build.yaml
release:
needs: build
concurrency: release
# Only intra-repo PRs are allowed to have PR artifacts uploaded
# We only want to trigger the upload once, when the upload label is added, not every time any other label is added
if: |
github.event.pull_request.head.repo.full_name == 'DeterminateSystems/flake-checker'
&& (
(github.event.action == 'labeled' && github.event.label.name == 'upload to s3')
|| (github.event.action != 'labeled' && contains(github.event.pull_request.labels.*.name, 'upload to s3'))
)
runs-on: ubuntu-latest
permissions:
id-token: write # In order to request a JWT for AWS auth
contents: read
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Create the artifacts directory
run: rm -rf ./artifacts && mkdir ./artifacts
- uses: actions/download-artifact@v3
with:
name: flake-checker-X64-macOS
path: cache-binary-X64-macOS
- name: Persist the cache binary
run: cp ./cache-binary-X64-macOS/flake-checker ./artifacts/flake-checker-X64-macOS
- uses: actions/download-artifact@v3
with:
name: flake-checker-X64-Linux
path: cache-binary-X64-Linux
- name: Persist the cache binary
run: cp ./cache-binary-X64-Linux/flake-checker ./artifacts/flake-checker-X64-Linux
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v2
with:
role-to-assume: ${{ secrets.AWS_S3_UPLOAD_ROLE }}
aws-region: us-east-2
- name: Publish Release (PR)
env:
AWS_BUCKET: ${{ secrets.AWS_S3_UPLOAD_BUCKET }}
run: |
.github/workflows/upload_s3.sh pr "${{ github.event.pull_request.number }}" "${{ github.event.pull_request.head.sha }}"
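
The if: guard publishes artifacts only for intra-repo PRs that carry the "upload to s3" label, and the labeled event re-runs the workflow when the label lands. A sketch of applying the label with the GitHub CLI (the PR number is a placeholder):

# Label an open pull request so the release job runs for the labeled event.
gh pr edit 123 --add-label "upload to s3"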

.github/workflows/release-tags.yml vendored Normal file

@@ -0,0 +1,58 @@
name: Release Tags
on:
push:
tags:
- "v*.*.*"
jobs:
build:
uses: ./.github/workflows/build.yaml
release:
needs: build
concurrency: release
runs-on: ubuntu-latest
permissions:
contents: write # In order to upload artifacts to GitHub releases
id-token: write # In order to request a JWT for AWS auth
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Create the artifacts directory
run: rm -rf ./artifacts && mkdir ./artifacts
- uses: actions/download-artifact@v3
with:
name: flake-checker-X64-macOS
path: cache-binary-X64-macOS
- name: Persist the cache binary
run: cp ./cache-binary-X64-macOS/flake-checker ./artifacts/flake-checker-X64-macOS
- uses: actions/download-artifact@v3
with:
name: flake-checker-X64-Linux
path: cache-binary-X64-Linux
- name: Persist the cache binary
run: cp ./cache-binary-X64-Linux/flake-checker ./artifacts/flake-checker-X64-Linux
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v2
with:
role-to-assume: ${{ secrets.AWS_S3_UPLOAD_ROLE }}
aws-region: us-east-2
- name: Publish Release to S3 (Tag)
env:
AWS_BUCKET: ${{ secrets.AWS_S3_UPLOAD_BUCKET }}
run: |
.github/workflows/upload_s3.sh "tag" "$GITHUB_REF_NAME" "$GITHUB_SHA"
- name: Publish Release to GitHub (Tag)
uses: softprops/action-gh-release@v1
with:
fail_on_unmatched_files: true
draft: true
files: |
artifacts/**
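
The tag workflow only fires on tags matching v*.*.*. A sketch of cutting a release (the version number is a placeholder):

# Tag the commit and push the tag; the push triggers the build, the S3 upload,
# and a draft GitHub release carrying the artifacts.
git tag v0.1.0
git push origin v0.1.0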

.github/workflows/upload_s3.sh vendored Executable file

@@ -0,0 +1,96 @@
#!/usr/bin/env bash
set -eu
TYPE="$1"
TYPE_ID="$2"
GIT_ISH="$3"
if [ "$TYPE" == "tag" ]; then
DEST="${TYPE_ID}"
else
DEST="${TYPE}_${TYPE_ID}"
fi
is_tag() {
if [[ "$GITHUB_REF_TYPE" == "tag" ]]; then
return 0
else
return 1
fi
}
# If the revision directory has already been created in S3 somehow, we don't want to reupload
if aws s3 ls "$AWS_BUCKET"/"$GIT_ISH"/; then
# Only exit if it's not a tag (since we're tagging a commit previously pushed to main)
if ! is_tag; then
echo "Revision $GIT_ISH was already uploaded; exiting"
exit 1
fi
fi
mkdir "$DEST"
mkdir "$GIT_ISH"
for artifact in $(find artifacts/ -type f); do
chmod +x "$artifact"
cp "$artifact" "$DEST"/
cp "$artifact" "$GIT_ISH"/
done
# If any artifact already exists in S3 and the hash is the same, we don't want to reupload
check_reupload() {
dest="$1"
for artifact in $(find "$dest" -type f); do
artifact_path="$dest"/"$(basename "$artifact")"
md5="$(md5sum "$artifact" | cut -d' ' -f1)"
obj="$(aws s3api head-object --bucket "$AWS_BUCKET" --key "$artifact_path" || echo '{}')"
obj_md5="$(jq -r .ETag <<<"$obj" | jq -r)" # head-object call returns ETag quoted, so `jq -r` again to unquote it
if [[ "$md5" == "$obj_md5" ]]; then
echo "Artifact $artifact was already uploaded; exiting"
# If we already uploaded to a tag, that's probably bad
is_tag && exit 1 || exit 0
fi
done
}
check_reupload "$DEST"
if ! is_tag; then
check_reupload "$GIT_ISH"
fi
aws s3 sync "$DEST"/ s3://"$AWS_BUCKET"/"$DEST"/ --acl public-read
if ! is_tag; then
aws s3 sync "$GIT_ISH"/ s3://"$AWS_BUCKET"/"$GIT_ISH"/ --acl public-read
fi
cat <<-EOF >> $GITHUB_STEP_SUMMARY
This commit's flake-checker binaries can be fetched from:
Intel macOS:
\`\`\`
curl --output flake-checker --proto '=https' --tlsv1.2 -sSf -L https://install.determinate.systems/flake-checker/rev/$GIT_ISH/X64-macOS
\`\`\`
x86_64 Linux:
\`\`\`
curl --output flake-checker --proto '=https' --tlsv1.2 -sSf -L https://install.determinate.systems/flake-checker/rev/$GIT_ISH/X64-Linux
\`\`\`
Or generally from this ${TYPE}:
Intel macOS:
\`\`\`
curl --output flake-checker --proto '=https' --tlsv1.2 -sSf -L https://install.determinate.systems/flake-checker/${TYPE}/${TYPE_ID}/X64-macOS
\`\`\`
x86_64 Linux:
\`\`\`
curl --output flake-checker --proto '=https' --tlsv1.2 -sSf -L https://install.determinate.systems/flake-checker/${TYPE}/${TYPE_ID}/X64-Linux
\`\`\`
EOF
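
Taken together, the script keeps two parallel prefixes per upload: one named after the ref (a tag name, branch_<name>, or pr_<number>) and one named after the commit, with the commit prefix skipped for tags. A sketch of the resulting layout and of fetching a binary through the URLs the step summary prints (bucket name, SHA, and tag are placeholders):

# Hypothetical bucket layout after a push to main and a v0.1.0 tag:
#   s3://<bucket>/branch_main/flake-checker-X64-Linux
#   s3://<bucket>/branch_main/flake-checker-X64-macOS
#   s3://<bucket>/<commit sha>/flake-checker-X64-Linux
#   s3://<bucket>/<commit sha>/flake-checker-X64-macOS
#   s3://<bucket>/v0.1.0/flake-checker-X64-Linux
#   s3://<bucket>/v0.1.0/flake-checker-X64-macOS
# Fetching the branch copy through the front end named in the step summary:
curl --output flake-checker --proto '=https' --tlsv1.2 -sSf -L \
  https://install.determinate.systems/flake-checker/branch/main/X64-Linux
chmod +x ./flake-checker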

crane.nix Normal file

@@ -0,0 +1,103 @@
{ stdenv
, pkgs
, lib
, crane
, rust
, rust-bin
, nix-gitignore
, supportedSystems
}:
let
inherit (stdenv.hostPlatform) system;
nightlyVersion = "2023-05-01";
rustNightly = pkgs.rust-bin.nightly.${nightlyVersion}.default.override {
extensions = [ "rust-src" "rust-analyzer-preview" ];
targets = cargoTargets;
};
# For easy cross-compilation in devShells
# We are just composing the pkgsCross.*.stdenv.cc together
crossPlatforms = let
makeCrossPlatform = crossSystem: let
pkgsCross =
if crossSystem == system then pkgs
else import pkgs.path {
inherit system crossSystem;
overlays = [];
};
rustTargetSpec = rust.toRustTargetSpec pkgsCross.pkgsStatic.stdenv.hostPlatform;
rustTargetSpecUnderscored = builtins.replaceStrings [ "-" ] [ "_" ] rustTargetSpec;
cargoLinkerEnv = lib.strings.toUpper "CARGO_TARGET_${rustTargetSpecUnderscored}_LINKER";
cargoCcEnv = "CC_${rustTargetSpecUnderscored}"; # for ring
cc = "${pkgsCross.stdenv.cc}/bin/${pkgsCross.stdenv.cc.targetPrefix}cc";
in {
name = crossSystem;
value = {
inherit rustTargetSpec cc;
pkgs = pkgsCross;
env = {
"${cargoLinkerEnv}" = cc;
"${cargoCcEnv}" = cc;
};
};
};
systems = lib.filter (s: s == system || lib.hasInfix "linux" s) supportedSystems;
in builtins.listToAttrs (map makeCrossPlatform systems);
cargoTargets = lib.mapAttrsToList (_: p: p.rustTargetSpec) crossPlatforms;
cargoCrossEnvs = lib.foldl (acc: p: acc // p.env) {} (builtins.attrValues crossPlatforms);
buildFor = system: let
crossPlatform = crossPlatforms.${system};
inherit (crossPlatform) pkgs;
craneLib = (crane.mkLib pkgs).overrideToolchain rustNightly;
crateName = craneLib.crateNameFromCargoToml {
cargoToml = ./Cargo.toml;
};
src = nix-gitignore.gitignoreSource [] ./.;
commonArgs = {
inherit (crateName) pname version;
inherit src;
buildInputs = with pkgs; []
++ lib.optionals pkgs.stdenv.isDarwin [
darwin.apple_sdk.frameworks.Security
];
nativeBuildInputs = with pkgs; []
# The Rust toolchain from rust-overlay has a dynamic libiconv in depsTargetTargetPropagated
# Our static libiconv needs to take precedence
++ lib.optionals pkgs.stdenv.isDarwin [
(libiconv.override { enableStatic = true; enableShared = false; })
];
cargoExtraArgs = "--target ${crossPlatform.rustTargetSpec}";
cargoVendorDir = craneLib.vendorMultipleCargoDeps {
inherit (craneLib.findCargoFiles src) cargoConfigs;
cargoLockList = [
./Cargo.lock
"${rustNightly.passthru.availableComponents.rust-src}/lib/rustlib/src/rust/Cargo.lock"
];
};
} // crossPlatform.env;
crate = craneLib.buildPackage (commonArgs // {
cargoArtifacts = craneLib.buildDepsOnly commonArgs;
# The resulting executable must be standalone
allowedRequisites = [];
});
in crate;
in {
inherit crossPlatforms cargoTargets cargoCrossEnvs rustNightly;
flake-checker = buildFor system;
}
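
flake.nix (later in this commit) imports this file with pkgs.callPackage ./crane.nix { inherit crane supportedSystems; }, exposing the static binary and the nightly toolchain as flake outputs. A sketch of exercising them, assuming flakes are enabled (the cargo target name is illustrative):

# Build the crane-based package (also the flake's default package).
nix build .#flake-checker -L
# Enter the dev shell, which inherits cargoCrossEnvs and rustNightly from crane.nix.
nix develop
# Inside the shell, cross-compile for one of the targets in cargoTargets,
# e.g. the static Linux target.
cargo build --release --target x86_64-unknown-linux-musl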

flake.lock

@@ -1,5 +1,48 @@
{
"nodes": {
"crane": {
"inputs": {
"flake-compat": [
"flake-compat"
],
"flake-utils": [
"flake-utils"
],
"nixpkgs": [
"nixpkgs"
],
"rust-overlay": "rust-overlay"
},
"locked": {
"lastModified": 1684468982,
"narHash": "sha256-EoC1N5sFdmjuAP3UOkyQujSOT6EdcXTnRw8hPjJkEgc=",
"owner": "ipetkov",
"repo": "crane",
"rev": "99de890b6ef4b4aab031582125b6056b792a4a30",
"type": "github"
},
"original": {
"owner": "ipetkov",
"repo": "crane",
"type": "github"
}
},
"flake-compat": {
"flake": false,
"locked": {
"lastModified": 1673956053,
"narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=",
"owner": "edolstra",
"repo": "flake-compat",
"rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9",
"type": "github"
},
"original": {
"owner": "edolstra",
"repo": "flake-compat",
"type": "github"
}
},
"flake-utils": {
"inputs": {
"systems": "systems"
@@ -20,45 +63,62 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1684873761,
"narHash": "sha256-9P9KnrDN26Q73n91nK1LHdbd5JDX8S6GB4XKjXMejpY=",
"lastModified": 1684844536,
"narHash": "sha256-M7HhXYVqAuNb25r/d3FOO0z4GxPqDIZp5UjHFbBgw0Q=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "b1a6fd2318766fd9b5d48c7b0ba58c7880ab068e",
"rev": "d30264c2691128adc261d7c9388033645f0e742b",
"type": "github"
},
"original": {
"owner": "NixOS",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs_2": {
"locked": {
"lastModified": 1681358109,
"narHash": "sha256-eKyxW4OohHQx9Urxi7TQlFBTDWII+F+x2hklDOQPB50=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "96ba1c52e54e74c3197f4d43026b3f3d92e83ff9",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-unstable",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"crane": "crane",
"flake-compat": "flake-compat",
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs",
"rust-overlay": "rust-overlay"
"rust-overlay": "rust-overlay_2"
}
},
"rust-overlay": {
"inputs": {
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs_2"
"flake-utils": [
"crane",
"flake-utils"
],
"nixpkgs": [
"crane",
"nixpkgs"
]
},
"locked": {
"lastModified": 1683080331,
"narHash": "sha256-nGDvJ1DAxZIwdn6ww8IFwzoHb2rqBP4wv/65Wt5vflk=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "d59c3fa0cba8336e115b376c2d9e91053aa59e56",
"type": "github"
},
"original": {
"owner": "oxalica",
"repo": "rust-overlay",
"type": "github"
}
},
"rust-overlay_2": {
"inputs": {
"flake-utils": [
"flake-utils"
],
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1684808436,

flake.nix

@@ -1,57 +1,62 @@
{
inputs = {
nixpkgs.url = "github:NixOS/nixpkgs";
rust-overlay.url = "github:oxalica/rust-overlay";
inputs = {
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
flake-utils.url = "github:numtide/flake-utils";
rust-overlay = {
url = "github:oxalica/rust-overlay";
inputs.nixpkgs.follows = "nixpkgs";
inputs.flake-utils.follows = "flake-utils";
};
crane = {
url = "github:ipetkov/crane";
inputs.nixpkgs.follows = "nixpkgs";
inputs.flake-compat.follows = "flake-compat";
inputs.flake-utils.follows = "flake-utils";
};
flake-compat = {
url = "github:edolstra/flake-compat";
flake = false;
};
};
outputs = { self, nixpkgs, ... }@inputs:
let
outputs = { self, nixpkgs, flake-utils, rust-overlay, crane, ... }: let
supportedSystems = flake-utils.lib.defaultSystems;
in flake-utils.lib.eachSystem supportedSystems (system: let
pkgs = import nixpkgs {
inherit system;
overlays = [
inputs.rust-overlay.overlays.default
(final: prev: {
rustToolchain = prev.rust-bin.fromRustupToolchainFile ./rust-toolchain.toml;
})
rust-overlay.overlay
];
systems = [ "aarch64-linux" "aarch64-darwin" "x86_64-linux" "x86_64-darwin" ];
forAllSystems = f: nixpkgs.lib.genAttrs systems (system: f {
pkgs = import nixpkgs { inherit system overlays; };
});
in
{
devShells = forAllSystems ({ pkgs }: {
default = pkgs.mkShell {
packages = with pkgs; [
rustToolchain
cargo-edit
cargo-watch
rust-analyzer
];
};
ci = pkgs.mkShell {
packages = with pkgs; [
rustToolchain
];
};
});
packages = forAllSystems ({ pkgs }:
let
meta = (builtins.fromTOML (builtins.readFile ./Cargo.toml)).package;
rust = pkgs.makeRustPlatform {
cargo = pkgs.rustToolchain;
rustc = pkgs.rustToolchain;
};
in
{
default =
rust.buildRustPackage {
pname = meta.name;
version = meta.version;
src = ./.;
cargoHash = "sha256-toXBfFKKa1Vk3aeafPVLwHN3M5IW9BZckRv/9CLsJZA=";
};
});
};
inherit (pkgs) lib;
cranePkgs = pkgs.callPackage ./crane.nix {
inherit crane supportedSystems;
};
in {
packages = rec {
inherit (cranePkgs) flake-checker;
default = flake-checker;
};
devShells = {
default = pkgs.mkShell ({
inputsFrom = [ cranePkgs.flake-checker ];
packages = with pkgs; [
bashInteractive
cranePkgs.rustNightly
cargo-bloat
cargo-edit
cargo-udeps
cargo-watch
rust-analyzer
];
} // cranePkgs.cargoCrossEnvs);
};
});
}
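
After the rewrite, every per-system output is produced by flake-utils.lib.eachSystem over supportedSystems plus crane.nix. A quick, hedged way to sanity-check the new output layout:

# Enumerate packages and devShells for each supported system.
nix flake show
# Evaluate the outputs and build the flake's checks for the current system.
nix flake check -L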