Merge pull request #566 from kinode-dao/hf/modularize-build

build: break out package building into its own script
This commit is contained in:
nick.kino 2024-10-14 10:16:58 -07:00 committed by GitHub
commit e1e3c69ff5
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
13 changed files with 386 additions and 297 deletions

93
Cargo.lock generated
View File

@ -1402,6 +1402,20 @@ dependencies = [
"zeroize",
]
[[package]]
name = "build_packages"
version = "0.1.0"
dependencies = [
"anyhow",
"clap",
"fs-err",
"kit 0.7.7",
"serde_json",
"tokio",
"walkdir",
"zip 0.6.6",
]
[[package]]
name = "bumpalo"
version = "3.16.0"
@ -2628,18 +2642,6 @@ version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d"
[[package]]
name = "filetime"
version = "0.2.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf401df4a4e3872c4fe8151134cf483738e74b67fc934d6532c882b3d24a4550"
dependencies = [
"cfg-if",
"libc",
"libredox",
"windows-sys 0.59.0",
]
[[package]]
name = "fixed-hash"
version = "0.8.0"
@ -3654,7 +3656,6 @@ dependencies = [
"clap",
"crossterm",
"dashmap 5.5.3",
"flate2",
"futures",
"generic-array",
"hex",
@ -3662,7 +3663,6 @@ dependencies = [
"http 1.1.0",
"indexmap",
"jwt",
"kit",
"lazy_static",
"lib",
"libc",
@ -3683,19 +3683,16 @@ dependencies = [
"snow",
"socket2 0.5.7",
"static_dir",
"tar",
"thiserror",
"tokio",
"tokio-tungstenite 0.21.0",
"unicode-segmentation",
"unicode-width",
"url",
"walkdir",
"warp",
"wasi-common",
"wasmtime",
"wasmtime-wasi",
"zip 0.6.6",
"zip 1.1.4",
]
@ -3788,6 +3785,43 @@ dependencies = [
"zip 0.6.6",
]
[[package]]
name = "kit"
version = "0.7.7"
source = "git+https://github.com/kinode-dao/kit?rev=9c94b4b#9c94b4bd3f2a9dc2eabb2da9bc2ef5e6eb07af9d"
dependencies = [
"alloy 0.1.4",
"alloy-sol-macro",
"alloy-sol-types",
"anyhow",
"base64 0.21.7",
"cargo_metadata",
"clap",
"color-eyre",
"dirs 5.0.1",
"fs-err",
"git2",
"hex",
"kinode_process_lib 0.9.2",
"nix 0.27.1",
"regex",
"reqwest",
"rpassword",
"semver 1.0.23",
"serde",
"serde_json",
"sha2",
"tokio",
"toml",
"tracing",
"tracing-appender",
"tracing-error",
"tracing-subscriber",
"walkdir",
"wit-bindgen",
"zip 0.6.6",
]
[[package]]
name = "kns_indexer"
version = "0.2.0"
@ -3827,7 +3861,7 @@ name = "lib"
version = "0.9.7"
dependencies = [
"alloy 0.2.1",
"kit",
"kit 0.7.6",
"lazy_static",
"rand 0.8.5",
"ring",
@ -3883,7 +3917,6 @@ checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
dependencies = [
"bitflags 2.6.0",
"libc",
"redox_syscall",
]
[[package]]
@ -5941,17 +5974,6 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
[[package]]
name = "tar"
version = "0.4.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb797dad5fb5b76fcf519e702f4a589483b5ef06567f160c392832c1f5e44909"
dependencies = [
"filetime",
"libc",
"xattr",
]
[[package]]
name = "target-lexicon"
version = "0.12.16"
@ -7715,17 +7737,6 @@ dependencies = [
"tap",
]
[[package]]
name = "xattr"
version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8da84f1a25939b27f6820d92aed108f83ff920fdf11a7b19366c27c4cda81d4f"
dependencies = [
"libc",
"linux-raw-sys",
"rustix",
]
[[package]]
name = "zerocopy"
version = "0.7.35"

View File

@ -26,7 +26,7 @@ members = [
"kinode/packages/terminal/help", "kinode/packages/terminal/hi", "kinode/packages/terminal/kfetch",
"kinode/packages/terminal/kill", "kinode/packages/terminal/m", "kinode/packages/terminal/top",
"kinode/packages/terminal/net_diagnostics", "kinode/packages/terminal/peer", "kinode/packages/terminal/peers",
"kinode/packages/tester/tester",
"kinode/packages/tester/tester", "scripts/build_packages",
]
default-members = ["lib"]
resolver = "2"

View File

@ -10,5 +10,4 @@ ENV NVM_DIR=/root/.nvm \
WORKDIR /input
# Set the default command to run the build script
# TODO: once build is idempotent, remove the `rm -rf` line
CMD ["/bin/bash", "-c", ". ~/.bashrc && . ~/.cargo/env && . $NVM_DIR/nvm.sh && rm -rf target/ kinode/packages/*/pkg/*wasm kinode/packages/*/*/target/ kinode/packages/*/pkg/api.zip kinode/packages/*/*/wit kinode/packages/app_store/pkg/ui kinode/packages/homepage/pkg/ui kinode/src/register-ui/build && ./scripts/build-release.py && cp -r /tmp/kinode-release/* /output && chmod 664 /output/* && rm -rf target/ kinode/packages/*/pkg/*wasm kinode/packages/*/*/target/ kinode/packages/*/pkg/api.zip kinode/packages/*/*/wit kinode/packages/app_store/pkg/ui kinode/packages/homepage/pkg/ui kinode/src/register-ui/build"]
CMD ["/bin/bash", "-c", ". ~/.bashrc && . ~/.cargo/env && . $NVM_DIR/nvm.sh && ./scripts/build-release.py && cp -r /tmp/kinode-release/* /output && chmod 664 /output/* && find . -user root -print0 2>/dev/null | xargs -0 rm -rf"]

View File

@ -28,26 +28,26 @@ On certain operating systems, you may need to install these dependencies if they
git clone git@github.com:kinode-dao/kinode.git
# Get some stuff so we can build Wasm.
# Install Rust and some `cargo` tools so we can build the runtime and Wasm.
cd kinode
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
cargo install wasm-tools
rustup install nightly
rustup target add wasm32-wasi
rustup target add wasm32-wasi --toolchain nightly
rustup target add wasm32-wasip1
rustup target add wasm32-wasip1 --toolchain nightly
cargo install cargo-wasi
# Install NPM so we can build frontends for "distro" packages.
# https://docs.npmjs.com/downloading-and-installing-node-js-and-npm
# If you want to skip this step, run cargo build with the environment variable SKIP_BUILD_FRONTEND=true
# If you want to skip this step, build the packages with `cargo run -p build_packages -- --skip-build-frontend` to skip building the frontends
# Build the runtime, along with a number of "distro" Wasm modules.
# The compiled binary will be at `kinode/target/debug/kinode`
# OPTIONAL: --release flag (slower build; faster runtime; binary at `kinode/target/release/kinode`)
# Build the "distro" Wasm modules, then, build the runtime.
# The compiled packages will be at `kinode/target/packages.zip`.
# The compiled binary will be at `kinode/target/debug/kinode`.
# OPTIONAL: --release flag (slower build; faster runtime; binary at `kinode/target/release/kinode`).
cargo +nightly build -p kinode
cd kinode
cargo run -p build_packages
cargo build -p kinode
```
## Security Status

View File

@ -14,12 +14,7 @@ path = "src/main.rs"
[build-dependencies]
anyhow = "1.0.71"
flate2 = "1.0"
kit = { git = "https://github.com/kinode-dao/kit", tag = "v0.7.6" }
tar = "0.4"
tokio = "1.28"
walkdir = "2.4"
zip = "0.6"
sha2 = "0.10.8"
[features]
simulation-mode = []

View File

@ -1,12 +1,9 @@
use std::{
fs::{self, File},
io::{BufReader, Cursor, Read, Write},
path::{Path, PathBuf},
};
use std::io::Read;
use std::path::{Path, PathBuf};
use flate2::read::GzDecoder;
use tar::Archive;
use zip::write::FileOptions;
use sha2::Digest;
const CANONICAL_PACKAGES_ZIP_PATH: &str = "../target/packages.zip";
macro_rules! p {
($($tokens: tt)*) => {
@ -14,214 +11,64 @@ macro_rules! p {
}
}
/// get cargo features to compile packages with
fn get_features() -> String {
let mut features = "".to_string();
for (key, _) in std::env::vars() {
if key.starts_with("CARGO_FEATURE_") {
let feature = key
.trim_start_matches("CARGO_FEATURE_")
.to_lowercase()
.replace("_", "-");
features.push_str(&feature);
fn compute_hash(file_path: &Path) -> anyhow::Result<String> {
let input_file = std::fs::File::open(file_path)?;
let mut reader = std::io::BufReader::new(input_file);
let mut hasher = sha2::Sha256::new();
let mut buffer = [0; 1024]; // buffer for chunks of the file
loop {
let count = reader.read(&mut buffer)?;
if count == 0 {
break;
}
hasher.update(&buffer[..count]);
}
features
}
/// print `cargo:rerun-if-changed=PATH` for each path of interest
fn output_reruns(dir: &Path) {
// Check files individually
if let Ok(entries) = fs::read_dir(dir) {
for entry in entries.filter_map(|e| e.ok()) {
let path = entry.path();
if path.is_dir() {
if let Some(dirname) = path.file_name().and_then(|n| n.to_str()) {
if dirname == "ui" || dirname == "target" {
// do not prompt a rerun if only UI/build files have changed
continue;
}
// If the entry is a directory not in rerun_files, recursively walk it
output_reruns(&path);
}
} else {
if let Some(filename) = path.file_name().and_then(|n| n.to_str()) {
if filename.ends_with(".zip") || filename.ends_with(".wasm") {
// do not prompt a rerun for compiled outputs
continue;
}
// any other changed file within a package subdir prompts a rerun
println!("cargo::rerun-if-changed={}", path.display());
}
}
}
}
}
fn _untar_gz_file(path: &Path, dest: &Path) -> std::io::Result<()> {
// Open the .tar.gz file
let tar_gz = File::open(path)?;
let tar_gz_reader = BufReader::new(tar_gz);
// Decode the gzip layer
let tar = GzDecoder::new(tar_gz_reader);
// Create a new archive from the tar file
let mut archive = Archive::new(tar);
// Unpack the archive into the specified destination directory
archive.unpack(dest)?;
Ok(())
}
fn build_and_zip_package(
entry_path: PathBuf,
parent_pkg_path: &str,
features: &str,
) -> anyhow::Result<(String, String, Vec<u8>)> {
let rt = tokio::runtime::Runtime::new().unwrap();
rt.block_on(async {
kit::build::execute(
&entry_path,
true,
false,
true,
features,
None,
None,
None,
vec![],
vec![],
false,
false,
false,
)
.await
.map_err(|e| anyhow::anyhow!("{:?}", e))?;
let mut writer = Cursor::new(Vec::new());
let options = FileOptions::default()
.compression_method(zip::CompressionMethod::Deflated)
.unix_permissions(0o755)
.last_modified_time(zip::DateTime::from_date_and_time(2023, 6, 19, 0, 0, 0).unwrap());
{
let mut zip = zip::ZipWriter::new(&mut writer);
for sub_entry in walkdir::WalkDir::new(parent_pkg_path) {
let sub_entry = sub_entry?;
let path = sub_entry.path();
let name = path.strip_prefix(Path::new(parent_pkg_path))?;
if path.is_file() {
zip.start_file(name.to_string_lossy(), options)?;
let mut file = File::open(path)?;
let mut buffer = Vec::new();
file.read_to_end(&mut buffer)?;
zip.write_all(&buffer)?;
} else if !name.as_os_str().is_empty() {
zip.add_directory(name.to_string_lossy(), options)?;
}
}
zip.finish()?;
}
let zip_contents = writer.into_inner();
let zip_filename = format!("{}.zip", entry_path.file_name().unwrap().to_str().unwrap());
Ok((entry_path.display().to_string(), zip_filename, zip_contents))
})
Ok(format!("{:x}", hasher.finalize()))
}
fn main() -> anyhow::Result<()> {
if std::env::var("SKIP_BUILD_SCRIPT").is_ok() {
p!("skipping build script");
return Ok(());
let path_to_packages_zip = match std::env::var("PATH_TO_PACKAGES_ZIP") {
Ok(env_var) => env_var,
Err(_) => {
let canonical_path = PathBuf::from(CANONICAL_PACKAGES_ZIP_PATH);
if canonical_path.exists() {
p!("No path given via PATH_TO_PACKAGES_ZIP envvar. Defaulting to path of `kinode/target/packages.zip`.");
CANONICAL_PACKAGES_ZIP_PATH.to_string()
} else {
return Err(anyhow::anyhow!("You must build packages.zip with scripts/build_packages or set PATH_TO_PACKAGES_ZIP to point to your desired packages.zip (default path at kinode/target/packages.zip was not populated)."));
}
}
};
let path = PathBuf::from(&path_to_packages_zip);
if !path.exists() {
return Err(anyhow::anyhow!(
"Path to packages {path_to_packages_zip} does not exist."
));
}
let path_to_packages_zip_path = PathBuf::from(&path_to_packages_zip).canonicalize()?;
let canonical_packages_zip_path = PathBuf::from(CANONICAL_PACKAGES_ZIP_PATH).canonicalize()?;
if path_to_packages_zip_path != canonical_packages_zip_path {
std::fs::copy(&path_to_packages_zip_path, &canonical_packages_zip_path)?;
}
// build core frontends
let pwd = std::env::current_dir()?;
let parent_dir = pwd.parent().unwrap();
let packages_dir = pwd.join("packages");
let core_frontends = vec!["src/register-ui"];
if std::env::var("SKIP_BUILD_FRONTEND").is_ok() {
p!("skipping frontend builds");
} else {
// build core frontends
let core_frontends = vec![
"src/register-ui",
"packages/app_store/ui",
"packages/homepage/ui",
// chess when brought in
];
// for each frontend, execute build.sh
for frontend in core_frontends {
let status = std::process::Command::new("sh")
.current_dir(pwd.join(frontend))
.arg("./build.sh")
.status()?;
if !status.success() {
return Err(anyhow::anyhow!("Failed to build frontend: {}", frontend));
}
// for each frontend, execute build.sh
for frontend in core_frontends {
let status = std::process::Command::new("sh")
.current_dir(pwd.join(frontend))
.arg("./build.sh")
.status()?;
if !status.success() {
return Err(anyhow::anyhow!("Failed to build frontend: {}", frontend));
}
}
output_reruns(&packages_dir);
let features = get_features();
let results: Vec<anyhow::Result<(String, String, Vec<u8>)>> = fs::read_dir(&packages_dir)?
.filter_map(|entry| {
let entry_path = match entry {
Ok(e) => e.path(),
Err(_) => return None,
};
let child_pkg_path = entry_path.join("pkg");
if !child_pkg_path.exists() {
// don't run on, e.g., `.DS_Store`
return None;
}
Some(build_and_zip_package(
entry_path.clone(),
child_pkg_path.to_str().unwrap(),
&features,
))
})
.collect();
// Process results, e.g., write to `bootstrapped_processes.rs`
// This part remains sequential
let mut bootstrapped_processes = vec![];
writeln!(
bootstrapped_processes,
"pub static BOOTSTRAPPED_PROCESSES: &[(&str, &[u8], &[u8])] = &["
)?;
for result in results {
match result {
Ok((entry_path, zip_filename, zip_contents)) => {
// Further processing, like saving ZIP files and updating bootstrapped_processes
let metadata_path = format!("{}/metadata.json", entry_path);
let zip_path = format!("{}/target/{}", parent_dir.display(), zip_filename);
fs::write(&zip_path, &zip_contents)?;
writeln!(
bootstrapped_processes,
" (\"{}\", include_bytes!(\"{}\"), include_bytes!(\"{}\")),",
zip_filename, metadata_path, zip_path,
)?;
}
Err(e) => return Err(e),
}
}
writeln!(bootstrapped_processes, "];")?;
let target_dir = pwd.join("../target");
if !target_dir.exists() {
fs::create_dir_all(&target_dir)?;
}
let bootstrapped_processes_path = target_dir.join("bootstrapped_processes.rs");
fs::write(&bootstrapped_processes_path, bootstrapped_processes)?;
let version = if let Ok(version) = std::env::var("DOCKER_BUILD_IMAGE_VERSION") {
// embed the DOCKER_BUILD_IMAGE_VERSION
version
@ -230,5 +77,8 @@ fn main() -> anyhow::Result<()> {
};
println!("cargo:rustc-env=DOCKER_BUILD_IMAGE_VERSION={version}");
let packages_zip_hash = compute_hash(&canonical_packages_zip_path)?;
println!("cargo:rustc-env=PACKAGES_ZIP_HASH={packages_zip_hash}");
Ok(())
}

View File

@ -1,5 +1,3 @@
#![feature(async_closure)]
#![feature(btree_extract_if)]
use anyhow::Result;
use clap::{arg, value_parser, Command};
use lib::types::core::{
@ -66,10 +64,17 @@ pub const MULTICALL_ADDRESS: &str = "0xcA11bde05977b3631167028862bE2a173976CA11"
#[tokio::main]
async fn main() {
// embed values in binary for inspection without running & print on boot
// e.g., to inspect without running, use
// ```bash
// strings kinode | grep DOCKER_BUILD_IMAGE_VERSION
// ```
println!(
"\nDOCKER_BUILD_IMAGE_VERSION: {}\n",
env!("DOCKER_BUILD_IMAGE_VERSION")
"\nDOCKER_BUILD_IMAGE_VERSION: {}\nPACKAGES_ZIP_HASH: {}\n",
env!("DOCKER_BUILD_IMAGE_VERSION"),
env!("PACKAGES_ZIP_HASH"),
);
let app = build_command();
let matches = app.get_matches();

View File

@ -14,7 +14,8 @@ use std::{
};
use tokio::{fs, io::AsyncWriteExt, sync::Mutex};
include!("../../target/bootstrapped_processes.rs");
static PACKAGES_ZIP: &[u8] = include_bytes!("../../target/packages.zip");
const FILE_TO_METADATA: &str = "file_to_metadata.json";
pub async fn load_state(
our_name: String,
@ -381,7 +382,7 @@ async fn bootstrap(
current.capabilities.extend(runtime_caps.clone());
}
let packages = get_zipped_packages().await;
let packages = get_zipped_packages();
for (package_metadata, mut package) in packages.clone() {
let package_name = package_metadata.properties.package_name.as_str();
@ -412,7 +413,7 @@ async fn bootstrap(
let mut zip_file =
fs::File::create(format!("{}/{}.zip", &pkg_path, &our_drive_name)).await?;
let package_zip_bytes = package.clone().into_inner().into_inner();
zip_file.write_all(package_zip_bytes).await?;
zip_file.write_all(&package_zip_bytes).await?;
// for each file in package.zip, write to vfs folder
for i in 0..package.len() {
@ -713,20 +714,28 @@ fn sign_cap(cap: Capability, keypair: Arc<signature::Ed25519KeyPair>) -> Vec<u8>
}
/// read in `include!()`ed .zip package files
async fn get_zipped_packages() -> Vec<(
Erc721Metadata,
zip::ZipArchive<std::io::Cursor<&'static [u8]>>,
)> {
fn get_zipped_packages() -> Vec<(Erc721Metadata, zip::ZipArchive<std::io::Cursor<Vec<u8>>>)> {
let mut packages = Vec::new();
for (package_name, metadata_bytes, bytes) in BOOTSTRAPPED_PROCESSES.iter() {
if let Ok(zip) = zip::ZipArchive::new(std::io::Cursor::new(*bytes)) {
if let Ok(metadata) = serde_json::from_slice::<Erc721Metadata>(metadata_bytes) {
packages.push((metadata, zip));
} else {
println!("fs: metadata for package {package_name} is not valid Erc721Metadata!\r",);
}
}
let mut packages_zip = zip::ZipArchive::new(std::io::Cursor::new(PACKAGES_ZIP)).unwrap();
let mut file_to_metadata = vec![];
packages_zip
.by_name(FILE_TO_METADATA)
.unwrap()
.read_to_end(&mut file_to_metadata)
.unwrap();
let file_to_metadata: HashMap<String, Erc721Metadata> =
serde_json::from_slice(&file_to_metadata).unwrap();
for (file_name, metadata) in file_to_metadata {
let mut zip_bytes = vec![];
packages_zip
.by_name(&file_name)
.unwrap()
.read_to_end(&mut zip_bytes)
.unwrap();
let zip_archive = zip::ZipArchive::new(std::io::Cursor::new(zip_bytes)).unwrap();
packages.push((metadata, zip_archive));
}
packages

View File

@ -1104,10 +1104,12 @@ impl Identity {
match &self.routing {
NodeRouting::Routers(_) => None,
NodeRouting::Direct { ip, ports } | NodeRouting::Both { ip, ports, .. } => {
if let Some(port) = ports.get("ws")
&& *port != 0
{
Some((ip, port))
if let Some(port) = ports.get("ws") {
if *port != 0 {
Some((ip, port))
} else {
None
}
} else {
None
}
@ -1118,10 +1120,12 @@ impl Identity {
match &self.routing {
NodeRouting::Routers(_) => None,
NodeRouting::Direct { ip, ports } | NodeRouting::Both { ip, ports, .. } => {
if let Some(port) = ports.get("tcp")
&& *port != 0
{
Some((ip, port))
if let Some(port) = ports.get("tcp") {
if *port != 0 {
Some((ip, port))
} else {
None
}
} else {
None
}

View File

@ -1,4 +1,4 @@
#![feature(let_chains)]
//#![feature(let_chains)]
pub mod core;
pub mod eth;

View File

@ -27,14 +27,32 @@ def build_and_move(feature, tmp_dir, architecture, os_name):
zip_prefix = f"kinode-{architecture}-{os_name}"
release_env = os.environ.copy()
release_env["CARGO_PROFILE_RELEASE_LTO"] = f"fat"
release_env["CARGO_PROFILE_RELEASE_CODEGEN_UNITS"] = f"1"
release_env["CARGO_PROFILE_RELEASE_STRIP"] = f"symbols"
release_env["CARGO_PROFILE_RELEASE_LTO"] = "fat"
release_env["CARGO_PROFILE_RELEASE_CODEGEN_UNITS"] = "1"
release_env["CARGO_PROFILE_RELEASE_STRIP"] = "symbols"
if feature:
subprocess.run(["cargo", "+nightly", "build", "--release", "-p", "kinode", "--features", feature], check=True, env=release_env)
release_env["PATH_TO_PACKAGES_ZIP"] = f"../target/packages-{feature}.zip"
subprocess.run(
["cargo", "run", "-p", "build_packages", "--", "--features", feature],
check=True,
#stdout=subprocess.PIPE,
#stderr=subprocess.PIPE,
)
subprocess.run(
["cargo", "build", "--release", "-p", "kinode", "--features", feature],
check=True,
env=release_env,
#stdout=subprocess.PIPE,
#stderr=subprocess.PIPE,
)
zip_name = f"{zip_prefix}-{feature}.zip"
else:
subprocess.run(["cargo", "+nightly", "build", "--release", "-p", "kinode"], check=True, env=release_env)
subprocess.run(["cargo", "run", "-p", "build_packages"], check=True)
subprocess.run(
["cargo", "build", "--release", "-p", "kinode"],
check=True,
env=release_env,
)
zip_name = f"{zip_prefix}.zip"
# Move and rename the binary
@ -74,4 +92,3 @@ def main():
if __name__ == "__main__":
main()

View File

@ -0,0 +1,14 @@
# Manifest for the `build_packages` helper binary (workspace member at
# `scripts/build_packages`): builds the core Kinode packages and bundles
# them into `target/packages.zip` for the runtime build to consume.
[package]
name = "build_packages"
version = "0.1.0"
edition = "2021"
[dependencies]
anyhow = "1.0.71"
clap = "4"
fs-err = "2.11"
# pinned to a specific revision so package builds are reproducible
kit = { git = "https://github.com/kinode-dao/kit", rev = "9c94b4b" }
serde_json = "1"
tokio = "1.28"
walkdir = "2.4"
zip = "0.6"

View File

@ -0,0 +1,185 @@
use std::{
io::{Cursor, Read, Write},
path::{Path, PathBuf},
};
use clap::{Arg, Command};
use fs_err as fs;
use zip::write::FileOptions;
/// Recursively zip the contents of `dir_path` into an in-memory archive and
/// return the raw zip bytes.
///
/// Entries are stored relative to `dir_path`, with a fixed modification time
/// and unix permissions so repeated runs over identical inputs produce
/// byte-identical archives.
fn zip_directory(dir_path: &Path) -> anyhow::Result<Vec<u8>> {
    // fixed timestamp + permissions -> reproducible archive bytes
    let options = FileOptions::default()
        .compression_method(zip::CompressionMethod::Deflated)
        .unix_permissions(0o755)
        .last_modified_time(zip::DateTime::from_date_and_time(2023, 6, 19, 0, 0, 0).unwrap());
    let mut buffer = Cursor::new(Vec::new());
    {
        let mut archive = zip::ZipWriter::new(&mut buffer);
        for entry in walkdir::WalkDir::new(dir_path) {
            let entry = entry?;
            let full_path = entry.path();
            let relative = full_path.strip_prefix(dir_path)?;
            if full_path.is_file() {
                archive.start_file(relative.to_string_lossy(), options)?;
                let mut contents = Vec::new();
                fs::File::open(full_path)?.read_to_end(&mut contents)?;
                archive.write_all(&contents)?;
            } else if !relative.as_os_str().is_empty() {
                // explicit directory entry for every non-root subdirectory
                archive.add_directory(relative.to_string_lossy(), options)?;
            }
        }
        archive.finish()?;
    }
    Ok(buffer.into_inner())
}
/// Build the package rooted at `entry_path` with `kit`, then zip its
/// `pkg/` directory (`parent_pkg_path`).
///
/// Returns `(entry_path, zip_filename, zip_contents)`, where `zip_filename`
/// is `"<package-dir-name>.zip"` and `zip_contents` are the raw archive bytes.
///
/// # Errors
/// Fails if the tokio runtime cannot be created, the `kit` build fails,
/// zipping fails, or the package directory name is not valid UTF-8.
fn build_and_zip_package(
    entry_path: PathBuf,
    parent_pkg_path: &str,
    skip_frontend: bool,
    features: &str,
) -> anyhow::Result<(PathBuf, String, Vec<u8>)> {
    // fresh runtime per package: kit's build entry point is async
    let rt = tokio::runtime::Runtime::new()?;
    rt.block_on(async {
        kit::build::execute(
            &entry_path,
            skip_frontend,
            false,
            true,
            features,
            None,
            None,
            None,
            vec![],
            vec![],
            false,
            false,
            false,
            false,
        )
        .await
        .map_err(|e| anyhow::anyhow!("{:?}", e))?;

        let zip_contents = zip_directory(Path::new(parent_pkg_path))?;
        // derive the archive name from the package directory name, without
        // panicking on a missing or non-UTF-8 file name
        let package_name = entry_path
            .file_name()
            .and_then(|n| n.to_str())
            .ok_or_else(|| anyhow::anyhow!("package path {entry_path:?} has no valid name"))?;
        let zip_filename = format!("{package_name}.zip");
        Ok((entry_path, zip_filename, zip_contents))
    })
}
/// Build every core Kinode package and bundle the results into a single
/// zip under the repo's `target/` directory.
///
/// Steps:
/// 1. parse CLI flags (`--features`, `--skip-build-frontend`, `--output-filename`),
/// 2. build + zip each directory under `kinode/packages/` that has a `pkg/` subdir,
/// 3. write each package zip plus a `file_to_metadata.json` index into `target/packages/`,
/// 4. zip that staging dir into `target/packages.zip` (or `packages-{features}.zip`).
fn main() -> anyhow::Result<()> {
    let matches = Command::new("build_packages")
        .about("Build the core Kinode packages.")
        .arg(
            Arg::new("FEATURES")
                .long("features")
                .help("List of features to compile packages with")
                .action(clap::ArgAction::Append),
        )
        .arg(
            Arg::new("SKIP_FRONTEND")
                .long("skip-build-frontend")
                .help("Skip building the frontend")
                .action(clap::ArgAction::SetTrue),
        )
        .arg(
            Arg::new("OUTPUT_FILENAME")
                .long("output-filename")
                .help("Set output filename (default: packages-{features}.zip)")
                .action(clap::ArgAction::Set),
        )
        .get_matches();

    // Locate the repo root relative to this executable, which lives at
    // e.g. `<repo>/target/debug/build_packages`.
    let current_exe = std::env::current_exe()?;
    let top_level_dir = current_exe
        .parent() // debug/
        .and_then(|p| p.parent()) // target/
        .and_then(|p| p.parent()) // <repo>/
        .ok_or_else(|| {
            anyhow::anyhow!("could not determine repo root from exe path {current_exe:?}")
        })?;
    let kinode_dir = top_level_dir.join("kinode");
    let packages_dir = kinode_dir.join("packages");

    // sort features so the same set always yields the same joined string
    // (and thus the same default output filename)
    let mut features = matches
        .get_many::<String>("FEATURES")
        .unwrap_or_default()
        .map(|s| s.to_owned())
        .collect::<Vec<String>>();
    features.sort();
    let features = features.join(",");

    let skip_frontend = matches.get_flag("SKIP_FRONTEND");

    let results: Vec<anyhow::Result<(PathBuf, String, Vec<u8>)>> = fs::read_dir(&packages_dir)?
        .filter_map(|entry| {
            let entry_path = match entry {
                Ok(e) => e.path(),
                Err(_) => return None,
            };
            let child_pkg_path = entry_path.join("pkg");
            if !child_pkg_path.exists() {
                // don't run on, e.g., `.DS_Store`
                return None;
            }
            // skip (rather than panic on) non-UTF-8 package paths
            let child_pkg_path_str = child_pkg_path.to_str()?;
            Some(build_and_zip_package(
                entry_path.clone(),
                child_pkg_path_str,
                skip_frontend,
                &features,
            ))
        })
        .collect();

    // map of `<package>.zip` -> parsed `metadata.json`, shipped inside the bundle
    let mut file_to_metadata = std::collections::HashMap::new();

    let target_dir = top_level_dir.join("target");
    let target_packages_dir = target_dir.join("packages");
    // start from a fresh staging dir so stale zips never leak into the bundle
    if target_packages_dir.exists() {
        fs::remove_dir_all(&target_packages_dir)?;
    }
    fs::create_dir_all(&target_packages_dir)?;

    for result in results {
        match result {
            Ok((entry_path, zip_filename, zip_contents)) => {
                let metadata_path = entry_path.join("metadata.json");
                let metadata_contents = fs::read_to_string(&metadata_path)?;
                let metadata_contents: serde_json::Value =
                    serde_json::from_str(&metadata_contents)?;
                file_to_metadata.insert(zip_filename.clone(), metadata_contents);
                let zip_path = target_packages_dir.join(&zip_filename);
                fs::write(&zip_path, &zip_contents)?;
            }
            Err(e) => return Err(anyhow::anyhow!("{e:?}")),
        }
    }

    let file_to_metadata = serde_json::to_value(&file_to_metadata)?;
    let file_to_metadata = serde_json::to_string_pretty(&file_to_metadata)?;
    let file_to_metadata_path = target_packages_dir.join("file_to_metadata.json");
    fs::write(&file_to_metadata_path, file_to_metadata)?;

    let package_zip_file_name = match matches.get_one::<String>("OUTPUT_FILENAME") {
        Some(filename) => filename.to_string(),
        None => {
            if features.is_empty() {
                "packages.zip".to_string()
            } else {
                format!("packages-{features}.zip")
            }
        }
    };
    let package_zip_path = target_dir.join(package_zip_file_name);
    let package_zip_contents = zip_directory(&target_packages_dir)?;
    fs::write(package_zip_path, package_zip_contents)?;

    Ok(())
}