From d8070e3303efa9c2407e472e6916da30f22ff40c Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Mon, 7 Oct 2024 10:34:08 -0700 Subject: [PATCH 01/14] build: break out package building into its own script --- scripts/build-package.py | 146 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 146 insertions(+) create mode 100755 scripts/build-package.py diff --git a/scripts/build-package.py b/scripts/build-package.py new file mode 100755 index 00000000..278dead9 --- /dev/null +++ b/scripts/build-package.py @@ -0,0 +1,146 @@ +#!/usr/bin/env python3 + +import argparse +import io +import os +from pathlib import Path +import shutil +import subprocess +import sys +import zipfile + +def get_features(args): + # Join the features into a comma-separated string + features = ','.join(args.features) + return features + +def zip_directory(directory_path): + buffer = io.BytesIO() + with zipfile.ZipFile(buffer, 'w', zipfile.ZIP_DEFLATED) as zip_file: + for root, dirs, files in os.walk(directory_path): + # Adding directories explicitly to ensure they are included in the zip + for dir in dirs: + dir_path = os.path.join(root, dir) + arcname = os.path.relpath(dir_path, start=directory_path) + # Create a ZipInfo object for the directory + zi = zipfile.ZipInfo(arcname + '/') + permissions = 0o755 + zi.external_attr = permissions << 16 | 0x10 # MS-DOS directory flag + zi.date_time = (2023, 6, 19, 0, 0, 0) + zip_file.writestr(zi, '') + for file in files: + file_path = os.path.join(root, file) + arcname = os.path.relpath(file_path, start=directory_path) + # Get file info + st = os.stat(file_path) + # Create ZipInfo object + zi = zipfile.ZipInfo(arcname) + # Set permissions + permissions = st.st_mode + zi.external_attr = permissions << 16 + # Set date_time + zi.date_time = (2023, 6, 19, 0, 0, 0) + # Read file data + with open(file_path, 'rb') as f: + file_data = f.read() + zip_file.writestr(zi, file_data) + zip_contents = buffer.getvalue() + return zip_contents + +def build_and_zip_package(entry_path, parent_pkg_path, features): + # Build the package + build_cmd = ['kit', 'build', entry_path, '--no-ui', '--skip-deps-check'] + if features: + build_cmd += ['--features', features] + result = subprocess.run(build_cmd, cwd=entry_path) + if result.returncode != 0: + raise Exception(f'Failed to build package at {entry_path}') + + # Now zip up the parent_pkg_path directory + zip_filename = f'{os.path.basename(entry_path)}.zip' + zip_contents = zip_directory(parent_pkg_path) + + return (str(entry_path), zip_filename, zip_contents) + +def main(): + parser = argparse.ArgumentParser(description='Build and zip Rust packages.') + parser.add_argument('--features', nargs='*', default=[], help='List of features to compile packages with') + parser.add_argument('--skip-build-frontend', action='store_true', help='Skip building the frontend') + args = parser.parse_args() + + script_path = Path(os.path.abspath(__file__)) + top_level_dir = script_path.parent.parent + kinode_dir = top_level_dir / 'kinode' + packages_dir = kinode_dir / 'packages' + + if args.skip_build_frontend: + print("skipping frontend builds") + else: + # Build core frontends + core_frontends = [ + 'src/register-ui', + 'packages/app_store/ui', + 'packages/homepage/ui', + # chess when brought in + ] + + # For each frontend, execute build.sh + for frontend in core_frontends: + frontend_path = kinode_dir / frontend + build_script = frontend_path / 'build.sh' + if not build_script.exists(): + print(f'Build script not found for frontend: {frontend} at {build_script}') + 
continue + result = subprocess.run(['sh', './build.sh'], cwd=frontend_path) + if result.returncode != 0: + raise Exception(f'Failed to build frontend: {frontend}') + + features = get_features(args) + + results = [] + for entry in os.scandir(packages_dir): + if not entry.is_dir(): + continue + entry_path = Path(entry.path) + child_pkg_path = entry_path / 'pkg' + if not child_pkg_path.exists(): + continue + result = build_and_zip_package(str(entry_path), str(child_pkg_path), features) + results.append(result) + + # Process results + bootstrapped_processes = [] + bootstrapped_processes.append('pub static BOOTSTRAPPED_PROCESSES: &[(&str, &[u8], &[u8])] = &[') + + target_dir = top_level_dir / 'target' + target_packages_dir = target_dir / 'packages' + if not target_packages_dir.exists(): + os.makedirs(target_packages_dir) + + for (entry_path, zip_filename, zip_contents) in results: + # Save zip_contents to zip_path + zip_path = target_packages_dir / zip_filename + with open(zip_path, 'wb') as f: + f.write(zip_contents) + + metadata_path = os.path.join(entry_path, 'metadata.json') + + # Update bootstrapped_processes + bootstrapped_processes.append( + f' ("{zip_filename}", include_bytes!("{metadata_path}"), include_bytes!("{zip_path}")),' + ) + + bootstrapped_processes.append('];') + + bootstrapped_processes_path = target_packages_dir / 'bootstrapped_processes.rs' + with open(bootstrapped_processes_path, 'w') as f: + f.write('\n'.join(bootstrapped_processes)) + + zip_contents = zip_directory(target_packages_dir) + zip_path = target_dir / 'packages.zip' + + with open(zip_path, 'wb') as f: + f.write(zip_contents) + +if __name__ == '__main__': + main() From 00688d96a55242e46fd14cb2738eadd2886dd217 Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Mon, 7 Oct 2024 14:51:35 -0700 Subject: [PATCH 02/14] build: use rust instead of python (so we can enforce deps) --- Cargo.lock | 13 ++ Cargo.toml | 2 +- scripts/build-package.py | 146 -------------------- scripts/build_package/Cargo.toml | 13 ++ scripts/build_package/src/main.rs | 215 ++++++++++++++++++++++++++++++ 5 files changed, 242 insertions(+), 147 deletions(-) delete mode 100755 scripts/build-package.py create mode 100644 scripts/build_package/Cargo.toml create mode 100644 scripts/build_package/src/main.rs diff --git a/Cargo.lock b/Cargo.lock index e941da6c..87af1b3e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1402,6 +1402,19 @@ dependencies = [ "zeroize", ] +[[package]] +name = "build_package" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap", + "fs-err", + "kit", + "tokio", + "walkdir", + "zip 0.6.6", +] + [[package]] name = "bumpalo" version = "3.16.0" diff --git a/Cargo.toml b/Cargo.toml index 8e405dd6..9b14b678 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -26,7 +26,7 @@ members = [ "kinode/packages/terminal/help", "kinode/packages/terminal/hi", "kinode/packages/terminal/kfetch", "kinode/packages/terminal/kill", "kinode/packages/terminal/m", "kinode/packages/terminal/top", "kinode/packages/terminal/net_diagnostics", "kinode/packages/terminal/peer", "kinode/packages/terminal/peers", - "kinode/packages/tester/tester", + "kinode/packages/tester/tester", "scripts/build_package", ] default-members = ["lib"] resolver = "2" diff --git a/scripts/build-package.py b/scripts/build-package.py deleted file mode 100755 index 278dead9..00000000 --- a/scripts/build-package.py +++ /dev/null @@ -1,146 +0,0 @@ -#!/usr/bin/env python3 - -import argparse -import io -import os -from pathlib import Path -import shutil -import subprocess -import sys -import 
zipfile - -def get_features(args): - # Join the features into a comma-separated string - features = ','.join(args.features) - return features - -def zip_directory(directory_path): - buffer = io.BytesIO() - with zipfile.ZipFile(buffer, 'w', zipfile.ZIP_DEFLATED) as zip_file: - for root, dirs, files in os.walk(directory_path): - # Adding directories explicitly to ensure they are included in the zip - for dir in dirs: - dir_path = os.path.join(root, dir) - arcname = os.path.relpath(dir_path, start=directory_path) - # Create a ZipInfo object for the directory - zi = zipfile.ZipInfo(arcname + '/') - permissions = 0o755 - zi.external_attr = permissions << 16 | 0x10 # MS-DOS directory flag - zi.date_time = (2023, 6, 19, 0, 0, 0) - zip_file.writestr(zi, '') - for file in files: - file_path = os.path.join(root, file) - arcname = os.path.relpath(file_path, start=directory_path) - # Get file info - st = os.stat(file_path) - # Create ZipInfo object - zi = zipfile.ZipInfo(arcname) - # Set permissions - permissions = st.st_mode - zi.external_attr = permissions << 16 - # Set date_time - zi.date_time = (2023, 6, 19, 0, 0, 0) - # Read file data - with open(file_path, 'rb') as f: - file_data = f.read() - zip_file.writestr(zi, file_data) - zip_contents = buffer.getvalue() - return zip_contents - -def build_and_zip_package(entry_path, parent_pkg_path, features): - # Build the package - build_cmd = ['kit', 'build', entry_path, '--no-ui', '--skip-deps-check'] - if features: - build_cmd += ['--features', features] - result = subprocess.run(build_cmd, cwd=entry_path) - if result.returncode != 0: - raise Exception(f'Failed to build package at {entry_path}') - - # Now zip up the parent_pkg_path directory - zip_filename = f'{os.path.basename(entry_path)}.zip' - zip_contents = zip_directory(parent_pkg_path) - - return (str(entry_path), zip_filename, zip_contents) - -def main(): - parser = argparse.ArgumentParser(description='Build and zip Rust packages.') - parser.add_argument('--features', nargs='*', default=[], help='List of features to compile packages with') - parser.add_argument('--skip-build-frontend', action='store_true', help='Skip building the frontend') - args = parser.parse_args() - - script_path = Path(os.path.abspath(__file__)) - top_level_dir = script_path.parent.parent - kinode_dir = top_level_dir / 'kinode' - packages_dir = kinode_dir / 'packages' - - if args.skip_build_frontend: - print("skipping frontend builds") - else: - # Build core frontends - core_frontends = [ - 'src/register-ui', - 'packages/app_store/ui', - 'packages/homepage/ui', - # chess when brought in - ] - - # For each frontend, execute build.sh - for frontend in core_frontends: - frontend_path = kinode_dir / frontend - build_script = frontend_path / 'build.sh' - if not build_script.exists(): - print(f'Build script not found for frontend: {frontend} at {build_script}') - continue - result = subprocess.run(['sh', './build.sh'], cwd=frontend_path) - if result.returncode != 0: - raise Exception(f'Failed to build frontend: {frontend}') - - features = get_features(args) - - results = [] - for entry in os.scandir(packages_dir): - if not entry.is_dir(): - continue - entry_path = Path(entry.path) - child_pkg_path = entry_path / 'pkg' - if not child_pkg_path.exists(): - continue - result = build_and_zip_package(str(entry_path), str(child_pkg_path), features) - results.append(result) - - # Process results - bootstrapped_processes = [] - bootstrapped_processes.append('pub static BOOTSTRAPPED_PROCESSES: &[(&str, &[u8], &[u8])] = &[') - - 
target_dir = top_level_dir / 'target' - target_packages_dir = target_dir / 'packages' - if not target_packages_dir.exists(): - os.makedirs(target_packages_dir) - - for (entry_path, zip_filename, zip_contents) in results: - # Save zip_contents to zip_path - zip_path = target_packages_dir / zip_filename - with open(zip_path, 'wb') as f: - f.write(zip_contents) - - metadata_path = os.path.join(entry_path, 'metadata.json') - - # Update bootstrapped_processes - bootstrapped_processes.append( - f' ("{zip_filename}", include_bytes!("{metadata_path}"), include_bytes!("{zip_path}")),' - ) - - bootstrapped_processes.append('];') - - bootstrapped_processes_path = target_packages_dir / 'bootstrapped_processes.rs' - with open(bootstrapped_processes_path, 'w') as f: - f.write('\n'.join(bootstrapped_processes)) - - zip_contents = zip_directory(target_packages_dir) - zip_path = target_dir / 'packages.zip' - - with open(zip_path, 'wb') as f: - f.write(zip_contents) - -if __name__ == '__main__': - main() diff --git a/scripts/build_package/Cargo.toml b/scripts/build_package/Cargo.toml new file mode 100644 index 00000000..8ac08b2a --- /dev/null +++ b/scripts/build_package/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "build_package" +version = "0.1.0" +edition = "2021" + +[dependencies] +anyhow = "1.0.71" +clap = "4" +fs-err = "2.11" +kit = { git = "https://github.com/kinode-dao/kit", tag = "v0.7.6" } +tokio = "1.28" +walkdir = "2.4" +zip = "0.6" diff --git a/scripts/build_package/src/main.rs b/scripts/build_package/src/main.rs new file mode 100644 index 00000000..f18a8cae --- /dev/null +++ b/scripts/build_package/src/main.rs @@ -0,0 +1,215 @@ +use std::{ + io::{Cursor, Read, Write}, + path::{Path, PathBuf}, +}; + +use clap::{Arg, Command}; +use fs_err as fs; +use zip::write::FileOptions; + +fn zip_directory(dir_path: &Path) -> anyhow::Result> { + let mut writer = Cursor::new(Vec::new()); + let options = FileOptions::default() + .compression_method(zip::CompressionMethod::Deflated) + .unix_permissions(0o755) + .last_modified_time(zip::DateTime::from_date_and_time(2023, 6, 19, 0, 0, 0).unwrap()); + { + let mut zip = zip::ZipWriter::new(&mut writer); + + for sub_entry in walkdir::WalkDir::new(dir_path) { + let sub_entry = sub_entry?; + let path = sub_entry.path(); + let name = path.strip_prefix(dir_path)?; + + if path.is_file() { + zip.start_file(name.to_string_lossy(), options)?; + let mut file = fs::File::open(path)?; + let mut buffer = Vec::new(); + file.read_to_end(&mut buffer)?; + zip.write_all(&buffer)?; + } else if !name.as_os_str().is_empty() { + zip.add_directory(name.to_string_lossy(), options)?; + } + } + zip.finish()?; + } + + let zip_contents = writer.into_inner(); + Ok(zip_contents) +} + +fn build_and_zip_package( + entry_path: PathBuf, + parent_pkg_path: &str, + features: &str, +) -> anyhow::Result<(PathBuf, String, Vec)> { + let rt = tokio::runtime::Runtime::new().unwrap(); + rt.block_on(async { + kit::build::execute( + &entry_path, + true, + false, + true, + features, + None, + None, + None, + vec![], + vec![], + false, + false, + false, + ) + .await + .map_err(|e| anyhow::anyhow!("{:?}", e))?; + + let zip_contents = zip_directory(&Path::new(parent_pkg_path))?; + let zip_filename = format!("{}.zip", entry_path.file_name().unwrap().to_str().unwrap()); + Ok((entry_path, zip_filename, zip_contents)) + }) +} + +fn main() -> anyhow::Result<()> { + let matches = Command::new("build_package") + .about("Build the core Kinode packages.") + .arg(Arg::new("FEATURES") + .long("features") + .help("List of 
features to compile packages with") + .action(clap::ArgAction::Append)) + .arg(Arg::new("SKIP_FRONTEND") + .long("skip-build-frontend") + .help("Skip building the frontend") + .action(clap::ArgAction::SetTrue)) + .get_matches(); + + + println!("a"); + // kinode/target/debug/build_package + let current_exe_dir = std::env::current_exe() // build_package + .unwrap(); + let top_level_dir = current_exe_dir + .parent() // debug/ + .unwrap() + .parent() // target/ + .unwrap() + .parent() // kinode/ + .unwrap(); + let kinode_dir = top_level_dir.join("kinode"); + let packages_dir = kinode_dir.join("packages"); + + println!("{current_exe_dir:?} {top_level_dir:?} {kinode_dir:?} {packages_dir:?}"); + + println!("b"); + if matches.get_flag("SKIP_FRONTEND") { + println!("skipping frontend builds"); + } else { + // build core frontends + let core_frontends = vec![ + "src/register-ui", + "packages/app_store/ui", + "packages/homepage/ui", + // chess when brought in + ]; + + // for each frontend, execute build.sh + for frontend in core_frontends { + let frontend_path = kinode_dir.join(frontend); + if !frontend_path.exists() { + panic!("couldn't find frontend at {frontend_path:?}"); + } + let status = std::process::Command::new("sh") + .current_dir(frontend_path) + .arg("./build.sh") + .status()?; + if !status.success() { + return Err(anyhow::anyhow!("Failed to build frontend: {}", frontend)); + } + } + } + + println!("c"); + let features = matches.get_many::("FEATURES") + .unwrap_or_default() + .map(|s| s.to_owned()) + .collect::>() + .join(","); + + println!("d"); + let results: Vec)>> = fs::read_dir(&packages_dir)? + .filter_map(|entry| { + let entry_path = match entry { + Ok(e) => e.path(), + Err(_) => return None, + }; + let child_pkg_path = entry_path.join("pkg"); + if !child_pkg_path.exists() { + // don't run on, e.g., `.DS_Store` + return None; + } + Some(build_and_zip_package( + entry_path.clone(), + child_pkg_path.to_str().unwrap(), + &features, + )) + }) + .collect(); + + println!("e"); + // Process results, e.g., write to `bootstrapped_processes.rs` + // This part remains sequential + let mut bootstrapped_processes = vec![]; + writeln!( + bootstrapped_processes, + "pub static BOOTSTRAPPED_PROCESSES: &[(&str, &[u8], &[u8])] = &[" + )?; + + println!("f"); + let target_dir = top_level_dir.join("target"); + let target_packages_dir = target_dir.join("packages"); + let target_metadatas_dir = target_dir.join("metadatas"); + for path in [&target_packages_dir, &target_metadatas_dir] { + if !path.exists() { + fs::create_dir_all(path)?; + } + } + + println!("g"); + for result in results { + match result { + Ok((entry_path, zip_filename, zip_contents)) => { + let metadata_path = entry_path.join("metadata.json"); + let metadata_file_name = { + let metadata_file_stem = entry_path + .file_stem() + .and_then(|s| s.to_str()) + .unwrap(); + format!("{metadata_file_stem}.json") + }; + let new_metadata_path = target_metadatas_dir.join(metadata_file_name); + fs::copy(&metadata_path, &new_metadata_path)?; + let zip_path = target_packages_dir.join(&zip_filename); + fs::write(&zip_path, &zip_contents)?; + + writeln!( + bootstrapped_processes, + " (\"{}\", include_bytes!(\"{}\"), include_bytes!(\"{}\"),),", + zip_filename, new_metadata_path.display(), zip_path.display(), + )?; + } + Err(e) => return Err(e), + } + } + + println!("h"); + writeln!(bootstrapped_processes, "];")?; + let bootstrapped_processes_path = target_packages_dir.join("bootstrapped_processes.rs"); + fs::write(&bootstrapped_processes_path, 
bootstrapped_processes)?; + + println!("i"); + let package_zip_path = target_dir.join("packages.zip"); + let package_zip_contents = zip_directory(&target_packages_dir)?; + fs::write(package_zip_path, package_zip_contents)?; + + println!("j"); + Ok(()) +} From c06eb909085b09e37841571be150760374100c2f Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 7 Oct 2024 21:52:00 +0000 Subject: [PATCH 03/14] Format Rust code using rustfmt --- scripts/build_package/src/main.rs | 34 +++++++++++++++++-------------- 1 file changed, 19 insertions(+), 15 deletions(-) diff --git a/scripts/build_package/src/main.rs b/scripts/build_package/src/main.rs index f18a8cae..1e52e6d3 100644 --- a/scripts/build_package/src/main.rs +++ b/scripts/build_package/src/main.rs @@ -72,17 +72,20 @@ fn build_and_zip_package( fn main() -> anyhow::Result<()> { let matches = Command::new("build_package") .about("Build the core Kinode packages.") - .arg(Arg::new("FEATURES") - .long("features") - .help("List of features to compile packages with") - .action(clap::ArgAction::Append)) - .arg(Arg::new("SKIP_FRONTEND") - .long("skip-build-frontend") - .help("Skip building the frontend") - .action(clap::ArgAction::SetTrue)) + .arg( + Arg::new("FEATURES") + .long("features") + .help("List of features to compile packages with") + .action(clap::ArgAction::Append), + ) + .arg( + Arg::new("SKIP_FRONTEND") + .long("skip-build-frontend") + .help("Skip building the frontend") + .action(clap::ArgAction::SetTrue), + ) .get_matches(); - println!("a"); // kinode/target/debug/build_package let current_exe_dir = std::env::current_exe() // build_package @@ -128,7 +131,8 @@ fn main() -> anyhow::Result<()> { } println!("c"); - let features = matches.get_many::("FEATURES") + let features = matches + .get_many::("FEATURES") .unwrap_or_default() .map(|s| s.to_owned()) .collect::>() @@ -179,10 +183,8 @@ fn main() -> anyhow::Result<()> { Ok((entry_path, zip_filename, zip_contents)) => { let metadata_path = entry_path.join("metadata.json"); let metadata_file_name = { - let metadata_file_stem = entry_path - .file_stem() - .and_then(|s| s.to_str()) - .unwrap(); + let metadata_file_stem = + entry_path.file_stem().and_then(|s| s.to_str()).unwrap(); format!("{metadata_file_stem}.json") }; let new_metadata_path = target_metadatas_dir.join(metadata_file_name); @@ -193,7 +195,9 @@ fn main() -> anyhow::Result<()> { writeln!( bootstrapped_processes, " (\"{}\", include_bytes!(\"{}\"), include_bytes!(\"{}\"),),", - zip_filename, new_metadata_path.display(), zip_path.display(), + zip_filename, + new_metadata_path.display(), + zip_path.display(), )?; } Err(e) => return Err(e), From 2025b8c26b45cc6dafd01587b5e968b353c2e006 Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Mon, 7 Oct 2024 19:35:57 -0700 Subject: [PATCH 04/14] build: use new build system (nearly 100%) --- Cargo.lock | 41 +---- kinode/Cargo.toml | 6 - kinode/build.rs | 239 +++--------------------------- kinode/src/main.rs | 2 - kinode/src/state.rs | 38 +++-- lib/src/core.rs | 20 ++- lib/src/lib.rs | 2 +- scripts/build_package/Cargo.toml | 1 + scripts/build_package/src/main.rs | 56 ++----- 9 files changed, 77 insertions(+), 328 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 87af1b3e..d5efb296 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1410,6 +1410,7 @@ dependencies = [ "clap", "fs-err", "kit", + "serde_json", "tokio", "walkdir", "zip 0.6.6", @@ -2641,18 +2642,6 @@ version = "0.2.9" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" -[[package]] -name = "filetime" -version = "0.2.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf401df4a4e3872c4fe8151134cf483738e74b67fc934d6532c882b3d24a4550" -dependencies = [ - "cfg-if", - "libc", - "libredox", - "windows-sys 0.59.0", -] - [[package]] name = "fixed-hash" version = "0.8.0" @@ -3667,7 +3656,6 @@ dependencies = [ "clap", "crossterm", "dashmap 5.5.3", - "flate2", "futures", "generic-array", "hex", @@ -3675,7 +3663,6 @@ dependencies = [ "http 1.1.0", "indexmap", "jwt", - "kit", "lazy_static", "lib", "nohash-hasher", @@ -3695,19 +3682,16 @@ dependencies = [ "snow", "socket2 0.5.7", "static_dir", - "tar", "thiserror", "tokio", "tokio-tungstenite 0.21.0", "unicode-segmentation", "unicode-width", "url", - "walkdir", "warp", "wasi-common", "wasmtime", "wasmtime-wasi", - "zip 0.6.6", "zip 1.1.4", ] @@ -3895,7 +3879,6 @@ checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ "bitflags 2.6.0", "libc", - "redox_syscall", ] [[package]] @@ -5953,17 +5936,6 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" -[[package]] -name = "tar" -version = "0.4.41" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb797dad5fb5b76fcf519e702f4a589483b5ef06567f160c392832c1f5e44909" -dependencies = [ - "filetime", - "libc", - "xattr", -] - [[package]] name = "target-lexicon" version = "0.12.16" @@ -7727,17 +7699,6 @@ dependencies = [ "tap", ] -[[package]] -name = "xattr" -version = "1.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8da84f1a25939b27f6820d92aed108f83ff920fdf11a7b19366c27c4cda81d4f" -dependencies = [ - "libc", - "linux-raw-sys", - "rustix", -] - [[package]] name = "zerocopy" version = "0.7.35" diff --git a/kinode/Cargo.toml b/kinode/Cargo.toml index 9c5ce11f..7c481238 100644 --- a/kinode/Cargo.toml +++ b/kinode/Cargo.toml @@ -14,12 +14,6 @@ path = "src/main.rs" [build-dependencies] anyhow = "1.0.71" -flate2 = "1.0" -kit = { git = "https://github.com/kinode-dao/kit", tag = "v0.7.6" } -tar = "0.4" -tokio = "1.28" -walkdir = "2.4" -zip = "0.6" [features] simulation-mode = [] diff --git a/kinode/build.rs b/kinode/build.rs index 388bb658..8250dac9 100644 --- a/kinode/build.rs +++ b/kinode/build.rs @@ -1,227 +1,36 @@ -use std::{ - fs::{self, File}, - io::{BufReader, Cursor, Read, Write}, - path::{Path, PathBuf}, -}; +use std::path::PathBuf; -use flate2::read::GzDecoder; -use tar::Archive; -use zip::write::FileOptions; - -macro_rules! 
p { - ($($tokens: tt)*) => { - println!("cargo:warning={}", format!($($tokens)*)) - } -} - -/// get cargo features to compile packages with -fn get_features() -> String { - let mut features = "".to_string(); - for (key, _) in std::env::vars() { - if key.starts_with("CARGO_FEATURE_") { - let feature = key - .trim_start_matches("CARGO_FEATURE_") - .to_lowercase() - .replace("_", "-"); - features.push_str(&feature); - } - } - features -} - -/// print `cargo:rerun-if-changed=PATH` for each path of interest -fn output_reruns(dir: &Path) { - // Check files individually - if let Ok(entries) = fs::read_dir(dir) { - for entry in entries.filter_map(|e| e.ok()) { - let path = entry.path(); - if path.is_dir() { - if let Some(dirname) = path.file_name().and_then(|n| n.to_str()) { - if dirname == "ui" || dirname == "target" { - // do not prompt a rerun if only UI/build files have changed - continue; - } - // If the entry is a directory not in rerun_files, recursively walk it - output_reruns(&path); - } - } else { - if let Some(filename) = path.file_name().and_then(|n| n.to_str()) { - if filename.ends_with(".zip") || filename.ends_with(".wasm") { - // do not prompt a rerun for compiled outputs - continue; - } - // any other changed file within a package subdir prompts a rerun - println!("cargo::rerun-if-changed={}", path.display()); - } - } - } - } -} - -fn _untar_gz_file(path: &Path, dest: &Path) -> std::io::Result<()> { - // Open the .tar.gz file - let tar_gz = File::open(path)?; - let tar_gz_reader = BufReader::new(tar_gz); - - // Decode the gzip layer - let tar = GzDecoder::new(tar_gz_reader); - - // Create a new archive from the tar file - let mut archive = Archive::new(tar); - - // Unpack the archive into the specified destination directory - archive.unpack(dest)?; - - Ok(()) -} - -fn build_and_zip_package( - entry_path: PathBuf, - parent_pkg_path: &str, - features: &str, -) -> anyhow::Result<(String, String, Vec)> { - let rt = tokio::runtime::Runtime::new().unwrap(); - rt.block_on(async { - kit::build::execute( - &entry_path, - true, - false, - true, - features, - None, - None, - None, - vec![], - vec![], - false, - false, - false, - ) - .await - .map_err(|e| anyhow::anyhow!("{:?}", e))?; - - let mut writer = Cursor::new(Vec::new()); - let options = FileOptions::default() - .compression_method(zip::CompressionMethod::Deflated) - .unix_permissions(0o755) - .last_modified_time(zip::DateTime::from_date_and_time(2023, 6, 19, 0, 0, 0).unwrap()); - { - let mut zip = zip::ZipWriter::new(&mut writer); - - for sub_entry in walkdir::WalkDir::new(parent_pkg_path) { - let sub_entry = sub_entry?; - let path = sub_entry.path(); - let name = path.strip_prefix(Path::new(parent_pkg_path))?; - - if path.is_file() { - zip.start_file(name.to_string_lossy(), options)?; - let mut file = File::open(path)?; - let mut buffer = Vec::new(); - file.read_to_end(&mut buffer)?; - zip.write_all(&buffer)?; - } else if !name.as_os_str().is_empty() { - zip.add_directory(name.to_string_lossy(), options)?; - } - } - zip.finish()?; - } - - let zip_contents = writer.into_inner(); - let zip_filename = format!("{}.zip", entry_path.file_name().unwrap().to_str().unwrap()); - Ok((entry_path.display().to_string(), zip_filename, zip_contents)) - }) -} +const CANONICAL_PACKAGES_ZIP_PATH: &str = "../target/packages.zip"; fn main() -> anyhow::Result<()> { - if std::env::var("SKIP_BUILD_SCRIPT").is_ok() { - p!("skipping build script"); - return Ok(()); - } - - let pwd = std::env::current_dir()?; - let parent_dir = pwd.parent().unwrap(); - let 
packages_dir = pwd.join("packages"); - - if std::env::var("SKIP_BUILD_FRONTEND").is_ok() { - p!("skipping frontend builds"); - } else { - // build core frontends - let core_frontends = vec![ - "src/register-ui", - "packages/app_store/ui", - "packages/homepage/ui", - // chess when brought in - ]; - - // for each frontend, execute build.sh - for frontend in core_frontends { - let status = std::process::Command::new("sh") - .current_dir(pwd.join(frontend)) - .arg("./build.sh") - .status()?; - if !status.success() { - return Err(anyhow::anyhow!("Failed to build frontend: {}", frontend)); + let path_to_packages_zip = match std::env::var("PATH_TO_PACKAGES_ZIP") { + Err(_) => { + let build_package_script_path = PathBuf::from("../scripts/build_package"); + let mut child = std::process::Command::new("cargo") + .arg("run") + .current_dir(&build_package_script_path) + .spawn()?; + let result = child.wait()?; + if !result.success() { + return Err(anyhow::anyhow!("Failed to build packages.")); } + CANONICAL_PACKAGES_ZIP_PATH.to_string() } + Ok(env_var) => env_var, + }; + let path = PathBuf::from(&path_to_packages_zip); + if !path.exists() { + return Err(anyhow::anyhow!( + "Path to packages {path_to_packages_zip} does not exist." + )); } - output_reruns(&packages_dir); - - let features = get_features(); - - let results: Vec)>> = fs::read_dir(&packages_dir)? - .filter_map(|entry| { - let entry_path = match entry { - Ok(e) => e.path(), - Err(_) => return None, - }; - let child_pkg_path = entry_path.join("pkg"); - if !child_pkg_path.exists() { - // don't run on, e.g., `.DS_Store` - return None; - } - Some(build_and_zip_package( - entry_path.clone(), - child_pkg_path.to_str().unwrap(), - &features, - )) - }) - .collect(); - - // Process results, e.g., write to `bootstrapped_processes.rs` - // This part remains sequential - let mut bootstrapped_processes = vec![]; - writeln!( - bootstrapped_processes, - "pub static BOOTSTRAPPED_PROCESSES: &[(&str, &[u8], &[u8])] = &[" - )?; - - for result in results { - match result { - Ok((entry_path, zip_filename, zip_contents)) => { - // Further processing, like saving ZIP files and updating bootstrapped_processes - let metadata_path = format!("{}/metadata.json", entry_path); - let zip_path = format!("{}/target/{}", parent_dir.display(), zip_filename); - fs::write(&zip_path, &zip_contents)?; - - writeln!( - bootstrapped_processes, - " (\"{}\", include_bytes!(\"{}\"), include_bytes!(\"{}\")),", - zip_filename, metadata_path, zip_path, - )?; - } - Err(e) => return Err(e), - } + let path_to_packages_zip_path = PathBuf::from(&path_to_packages_zip).canonicalize()?; + let canonical_packages_zip_path = PathBuf::from(CANONICAL_PACKAGES_ZIP_PATH).canonicalize()?; + if path_to_packages_zip_path != canonical_packages_zip_path { + std::fs::copy(path_to_packages_zip_path, CANONICAL_PACKAGES_ZIP_PATH)?; } - writeln!(bootstrapped_processes, "];")?; - let target_dir = pwd.join("../target"); - if !target_dir.exists() { - fs::create_dir_all(&target_dir)?; - } - let bootstrapped_processes_path = target_dir.join("bootstrapped_processes.rs"); - fs::write(&bootstrapped_processes_path, bootstrapped_processes)?; - let version = if let Ok(version) = std::env::var("DOCKER_BUILD_IMAGE_VERSION") { // embed the DOCKER_BUILD_IMAGE_VERSION version diff --git a/kinode/src/main.rs b/kinode/src/main.rs index e8e053d9..ade4fe50 100644 --- a/kinode/src/main.rs +++ b/kinode/src/main.rs @@ -1,5 +1,3 @@ -#![feature(async_closure)] -#![feature(btree_extract_if)] use anyhow::Result; use clap::{arg, value_parser, 
Command}; use lib::types::core::{ diff --git a/kinode/src/state.rs b/kinode/src/state.rs index a86c3e35..d8683f26 100644 --- a/kinode/src/state.rs +++ b/kinode/src/state.rs @@ -14,7 +14,8 @@ use std::{ }; use tokio::{fs, io::AsyncWriteExt, sync::Mutex}; -include!("../../target/bootstrapped_processes.rs"); +static PACKAGES_ZIP: &[u8] = include_bytes!("../../target/packages.zip"); +const FILE_TO_METADATA: &str = "file_to_metadata.json"; pub async fn load_state( our_name: String, @@ -381,7 +382,7 @@ async fn bootstrap( current.capabilities.extend(runtime_caps.clone()); } - let packages = get_zipped_packages().await; + let packages = get_zipped_packages(); for (package_metadata, mut package) in packages.clone() { let package_name = package_metadata.properties.package_name.as_str(); @@ -412,7 +413,7 @@ async fn bootstrap( let mut zip_file = fs::File::create(format!("{}/{}.zip", &pkg_path, &our_drive_name)).await?; let package_zip_bytes = package.clone().into_inner().into_inner(); - zip_file.write_all(package_zip_bytes).await?; + zip_file.write_all(&package_zip_bytes).await?; // for each file in package.zip, write to vfs folder for i in 0..package.len() { @@ -713,20 +714,31 @@ fn sign_cap(cap: Capability, keypair: Arc) -> Vec } /// read in `include!()`ed .zip package files -async fn get_zipped_packages() -> Vec<( +fn get_zipped_packages() -> Vec<( Erc721Metadata, - zip::ZipArchive>, + zip::ZipArchive>>, )> { let mut packages = Vec::new(); - for (package_name, metadata_bytes, bytes) in BOOTSTRAPPED_PROCESSES.iter() { - if let Ok(zip) = zip::ZipArchive::new(std::io::Cursor::new(*bytes)) { - if let Ok(metadata) = serde_json::from_slice::(metadata_bytes) { - packages.push((metadata, zip)); - } else { - println!("fs: metadata for package {package_name} is not valid Erc721Metadata!\r",); - } - } + let mut packages_zip = zip::ZipArchive::new(std::io::Cursor::new(PACKAGES_ZIP)).unwrap(); + let mut file_to_metadata = vec![]; + packages_zip + .by_name(FILE_TO_METADATA) + .unwrap() + .read_to_end(&mut file_to_metadata) + .unwrap(); + let file_to_metadata: HashMap = + serde_json::from_slice(&file_to_metadata).unwrap(); + + for (file_name, metadata) in file_to_metadata { + let mut zip_bytes = vec![]; + packages_zip + .by_name(&file_name) + .unwrap() + .read_to_end(&mut zip_bytes) + .unwrap(); + let zip_archive = zip::ZipArchive::new(std::io::Cursor::new(zip_bytes)).unwrap(); + packages.push((metadata, zip_archive)); } packages diff --git a/lib/src/core.rs b/lib/src/core.rs index b8a28676..43296cf0 100644 --- a/lib/src/core.rs +++ b/lib/src/core.rs @@ -1102,10 +1102,12 @@ impl Identity { match &self.routing { NodeRouting::Routers(_) => None, NodeRouting::Direct { ip, ports } | NodeRouting::Both { ip, ports, .. } => { - if let Some(port) = ports.get("ws") - && *port != 0 - { - Some((ip, port)) + if let Some(port) = ports.get("ws") { + if *port != 0 { + Some((ip, port)) + } else { + None + } } else { None } @@ -1116,10 +1118,12 @@ impl Identity { match &self.routing { NodeRouting::Routers(_) => None, NodeRouting::Direct { ip, ports } | NodeRouting::Both { ip, ports, .. 
} => { - if let Some(port) = ports.get("tcp") - && *port != 0 - { - Some((ip, port)) + if let Some(port) = ports.get("tcp") { + if *port != 0 { + Some((ip, port)) + } else { + None + } } else { None } diff --git a/lib/src/lib.rs b/lib/src/lib.rs index e99aac02..d9306949 100644 --- a/lib/src/lib.rs +++ b/lib/src/lib.rs @@ -1,4 +1,4 @@ -#![feature(let_chains)] +//#![feature(let_chains)] pub mod core; pub mod eth; diff --git a/scripts/build_package/Cargo.toml b/scripts/build_package/Cargo.toml index 8ac08b2a..824917a6 100644 --- a/scripts/build_package/Cargo.toml +++ b/scripts/build_package/Cargo.toml @@ -8,6 +8,7 @@ anyhow = "1.0.71" clap = "4" fs-err = "2.11" kit = { git = "https://github.com/kinode-dao/kit", tag = "v0.7.6" } +serde_json = "1" tokio = "1.28" walkdir = "2.4" zip = "0.6" diff --git a/scripts/build_package/src/main.rs b/scripts/build_package/src/main.rs index 1e52e6d3..376d8556 100644 --- a/scripts/build_package/src/main.rs +++ b/scripts/build_package/src/main.rs @@ -86,7 +86,6 @@ fn main() -> anyhow::Result<()> { ) .get_matches(); - println!("a"); // kinode/target/debug/build_package let current_exe_dir = std::env::current_exe() // build_package .unwrap(); @@ -100,9 +99,6 @@ fn main() -> anyhow::Result<()> { let kinode_dir = top_level_dir.join("kinode"); let packages_dir = kinode_dir.join("packages"); - println!("{current_exe_dir:?} {top_level_dir:?} {kinode_dir:?} {packages_dir:?}"); - - println!("b"); if matches.get_flag("SKIP_FRONTEND") { println!("skipping frontend builds"); } else { @@ -130,7 +126,6 @@ fn main() -> anyhow::Result<()> { } } - println!("c"); let features = matches .get_many::("FEATURES") .unwrap_or_default() @@ -138,7 +133,6 @@ fn main() -> anyhow::Result<()> { .collect::>() .join(","); - println!("d"); let results: Vec)>> = fs::read_dir(&packages_dir)? 
.filter_map(|entry| { let entry_path = match entry { @@ -158,62 +152,38 @@ fn main() -> anyhow::Result<()> { }) .collect(); - println!("e"); - // Process results, e.g., write to `bootstrapped_processes.rs` - // This part remains sequential - let mut bootstrapped_processes = vec![]; - writeln!( - bootstrapped_processes, - "pub static BOOTSTRAPPED_PROCESSES: &[(&str, &[u8], &[u8])] = &[" - )?; + let mut file_to_metadata = std::collections::HashMap::new(); - println!("f"); let target_dir = top_level_dir.join("target"); let target_packages_dir = target_dir.join("packages"); - let target_metadatas_dir = target_dir.join("metadatas"); - for path in [&target_packages_dir, &target_metadatas_dir] { - if !path.exists() { - fs::create_dir_all(path)?; - } + // fresh + if target_packages_dir.exists() { + fs::remove_dir_all(&target_packages_dir)?; } + fs::create_dir_all(&target_packages_dir)?; - println!("g"); for result in results { match result { Ok((entry_path, zip_filename, zip_contents)) => { let metadata_path = entry_path.join("metadata.json"); - let metadata_file_name = { - let metadata_file_stem = - entry_path.file_stem().and_then(|s| s.to_str()).unwrap(); - format!("{metadata_file_stem}.json") - }; - let new_metadata_path = target_metadatas_dir.join(metadata_file_name); - fs::copy(&metadata_path, &new_metadata_path)?; + let metadata_contents = fs::read_to_string(&metadata_path)?; + let metadata_contents: serde_json::Value = serde_json::from_str(&metadata_contents)?; + file_to_metadata.insert(zip_filename.clone(), metadata_contents); let zip_path = target_packages_dir.join(&zip_filename); fs::write(&zip_path, &zip_contents)?; - - writeln!( - bootstrapped_processes, - " (\"{}\", include_bytes!(\"{}\"), include_bytes!(\"{}\"),),", - zip_filename, - new_metadata_path.display(), - zip_path.display(), - )?; } - Err(e) => return Err(e), + Err(e) => return Err(anyhow::anyhow!("{e:?}")), } } - println!("h"); - writeln!(bootstrapped_processes, "];")?; - let bootstrapped_processes_path = target_packages_dir.join("bootstrapped_processes.rs"); - fs::write(&bootstrapped_processes_path, bootstrapped_processes)?; + let file_to_metadata = serde_json::to_value(&file_to_metadata)?; + let file_to_metadata = serde_json::to_string_pretty(&file_to_metadata)?; + let file_to_metadata_path = target_packages_dir.join("file_to_metadata.json"); + fs::write(&file_to_metadata_path, file_to_metadata)?; - println!("i"); let package_zip_path = target_dir.join("packages.zip"); let package_zip_contents = zip_directory(&target_packages_dir)?; fs::write(package_zip_path, package_zip_contents)?; - println!("j"); Ok(()) } From 275fa494ec8134c56d97697522af6a726728e521 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 8 Oct 2024 02:36:26 +0000 Subject: [PATCH 05/14] Format Rust code using rustfmt --- kinode/src/state.rs | 7 ++----- scripts/build_package/src/main.rs | 3 ++- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/kinode/src/state.rs b/kinode/src/state.rs index d8683f26..7b14ad0f 100644 --- a/kinode/src/state.rs +++ b/kinode/src/state.rs @@ -14,7 +14,7 @@ use std::{ }; use tokio::{fs, io::AsyncWriteExt, sync::Mutex}; -static PACKAGES_ZIP: &[u8] = include_bytes!("../../target/packages.zip"); +static PACKAGES_ZIP: &[u8] = include_bytes!("../../target/packages.zip"); const FILE_TO_METADATA: &str = "file_to_metadata.json"; pub async fn load_state( @@ -714,10 +714,7 @@ fn sign_cap(cap: Capability, keypair: Arc) -> Vec } /// read in `include!()`ed .zip 
package files -fn get_zipped_packages() -> Vec<( - Erc721Metadata, - zip::ZipArchive>>, -)> { +fn get_zipped_packages() -> Vec<(Erc721Metadata, zip::ZipArchive>>)> { let mut packages = Vec::new(); let mut packages_zip = zip::ZipArchive::new(std::io::Cursor::new(PACKAGES_ZIP)).unwrap(); diff --git a/scripts/build_package/src/main.rs b/scripts/build_package/src/main.rs index 376d8556..a76fcb32 100644 --- a/scripts/build_package/src/main.rs +++ b/scripts/build_package/src/main.rs @@ -167,7 +167,8 @@ fn main() -> anyhow::Result<()> { Ok((entry_path, zip_filename, zip_contents)) => { let metadata_path = entry_path.join("metadata.json"); let metadata_contents = fs::read_to_string(&metadata_path)?; - let metadata_contents: serde_json::Value = serde_json::from_str(&metadata_contents)?; + let metadata_contents: serde_json::Value = + serde_json::from_str(&metadata_contents)?; file_to_metadata.insert(zip_filename.clone(), metadata_contents); let zip_path = target_packages_dir.join(&zip_filename); fs::write(&zip_path, &zip_contents)?; From a9df137402e3fc4deb9083abd610d5553adbc8a1 Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Mon, 7 Oct 2024 20:20:48 -0700 Subject: [PATCH 06/14] build: get it working --- Cargo.lock | 2 +- Cargo.toml | 2 +- kinode/build.rs | 27 ++++++++++--------- .../Cargo.toml | 2 +- .../src/main.rs | 5 ++-- 5 files changed, 20 insertions(+), 18 deletions(-) rename scripts/{build_package => build_packages}/Cargo.toml (90%) rename scripts/{build_package => build_packages}/src/main.rs (97%) diff --git a/Cargo.lock b/Cargo.lock index d5efb296..0c764f2d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1403,7 +1403,7 @@ dependencies = [ ] [[package]] -name = "build_package" +name = "build_packages" version = "0.1.0" dependencies = [ "anyhow", diff --git a/Cargo.toml b/Cargo.toml index 9b14b678..f44e6005 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -26,7 +26,7 @@ members = [ "kinode/packages/terminal/help", "kinode/packages/terminal/hi", "kinode/packages/terminal/kfetch", "kinode/packages/terminal/kill", "kinode/packages/terminal/m", "kinode/packages/terminal/top", "kinode/packages/terminal/net_diagnostics", "kinode/packages/terminal/peer", "kinode/packages/terminal/peers", - "kinode/packages/tester/tester", "scripts/build_package", + "kinode/packages/tester/tester", "scripts/build_packages", ] default-members = ["lib"] resolver = "2" diff --git a/kinode/build.rs b/kinode/build.rs index 8250dac9..5ac4a90e 100644 --- a/kinode/build.rs +++ b/kinode/build.rs @@ -2,21 +2,24 @@ use std::path::PathBuf; const CANONICAL_PACKAGES_ZIP_PATH: &str = "../target/packages.zip"; +macro_rules! p { + ($($tokens: tt)*) => { + println!("cargo:warning={}", format!($($tokens)*)) + } +} + fn main() -> anyhow::Result<()> { let path_to_packages_zip = match std::env::var("PATH_TO_PACKAGES_ZIP") { - Err(_) => { - let build_package_script_path = PathBuf::from("../scripts/build_package"); - let mut child = std::process::Command::new("cargo") - .arg("run") - .current_dir(&build_package_script_path) - .spawn()?; - let result = child.wait()?; - if !result.success() { - return Err(anyhow::anyhow!("Failed to build packages.")); - } - CANONICAL_PACKAGES_ZIP_PATH.to_string() - } Ok(env_var) => env_var, + Err(_) => { + let canonical_path = PathBuf::from(CANONICAL_PACKAGES_ZIP_PATH); + if canonical_path.exists() { + p!("No path given via PATH_TO_PACKAGES_ZIP envvar. 
Defaulting to path of `kinode/target/packages.zip`."); + CANONICAL_PACKAGES_ZIP_PATH.to_string() + } else { + return Err(anyhow::anyhow!("You must build packages.zip with scripts/build_packages or set PATH_TO_PACKAGES_ZIP to point to your desired pacakges.zip (default path at kinode/target/packages.zip was not populated).")); + } + } }; let path = PathBuf::from(&path_to_packages_zip); if !path.exists() { diff --git a/scripts/build_package/Cargo.toml b/scripts/build_packages/Cargo.toml similarity index 90% rename from scripts/build_package/Cargo.toml rename to scripts/build_packages/Cargo.toml index 824917a6..fe507b3e 100644 --- a/scripts/build_package/Cargo.toml +++ b/scripts/build_packages/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "build_package" +name = "build_packages" version = "0.1.0" edition = "2021" diff --git a/scripts/build_package/src/main.rs b/scripts/build_packages/src/main.rs similarity index 97% rename from scripts/build_package/src/main.rs rename to scripts/build_packages/src/main.rs index a76fcb32..f80a99c9 100644 --- a/scripts/build_package/src/main.rs +++ b/scripts/build_packages/src/main.rs @@ -70,7 +70,7 @@ fn build_and_zip_package( } fn main() -> anyhow::Result<()> { - let matches = Command::new("build_package") + let matches = Command::new("build_packages") .about("Build the core Kinode packages.") .arg( Arg::new("FEATURES") @@ -167,8 +167,7 @@ fn main() -> anyhow::Result<()> { Ok((entry_path, zip_filename, zip_contents)) => { let metadata_path = entry_path.join("metadata.json"); let metadata_contents = fs::read_to_string(&metadata_path)?; - let metadata_contents: serde_json::Value = - serde_json::from_str(&metadata_contents)?; + let metadata_contents: serde_json::Value = serde_json::from_str(&metadata_contents)?; file_to_metadata.insert(zip_filename.clone(), metadata_contents); let zip_path = target_packages_dir.join(&zip_filename); fs::write(&zip_path, &zip_contents)?; From 92ab3cafa9748b88d2158ece023bdf030e1dc35e Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 8 Oct 2024 03:21:11 +0000 Subject: [PATCH 07/14] Format Rust code using rustfmt --- scripts/build_packages/src/main.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/build_packages/src/main.rs b/scripts/build_packages/src/main.rs index f80a99c9..bf0c8974 100644 --- a/scripts/build_packages/src/main.rs +++ b/scripts/build_packages/src/main.rs @@ -167,7 +167,8 @@ fn main() -> anyhow::Result<()> { Ok((entry_path, zip_filename, zip_contents)) => { let metadata_path = entry_path.join("metadata.json"); let metadata_contents = fs::read_to_string(&metadata_path)?; - let metadata_contents: serde_json::Value = serde_json::from_str(&metadata_contents)?; + let metadata_contents: serde_json::Value = + serde_json::from_str(&metadata_contents)?; file_to_metadata.insert(zip_filename.clone(), metadata_contents); let zip_path = target_packages_dir.join(&zip_filename); fs::write(&zip_path, &zip_contents)?; From b6382e01908ecb89def647a15ef1fd708a92da74 Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Mon, 7 Oct 2024 22:06:28 -0700 Subject: [PATCH 08/14] build: use new script in build-release.py --- README.md | 14 ++++++------ scripts/build-release.py | 36 +++++++++++++++++++++++++----- scripts/build_packages/src/main.rs | 25 +++++++++++++++++---- 3 files changed, 58 insertions(+), 17 deletions(-) diff --git a/README.md b/README.md index 4a7bf501..157d2eb0 100644 --- a/README.md +++ b/README.md @@ -33,9 +33,6 @@ git clone 
git@github.com:kinode-dao/kinode.git cd kinode cargo install wasm-tools rustup install nightly -rustup target add wasm32-wasi -rustup target add wasm32-wasi --toolchain nightly -rustup target add wasm32-wasip1 rustup target add wasm32-wasip1 --toolchain nightly cargo install cargo-wasi @@ -43,11 +40,14 @@ cargo install cargo-wasi # https://docs.npmjs.com/downloading-and-installing-node-js-and-npm # If you want to skip this step, run cargo build with the environment variable SKIP_BUILD_FRONTEND=true -# Build the runtime, along with a number of "distro" Wasm modules. -# The compiled binary will be at `kinode/target/debug/kinode` -# OPTIONAL: --release flag (slower build; faster runtime; binary at `kinode/target/release/kinode`) +# Build the "distro" Wasm modules. +# Then, build the runtime. +# The compiled packages will be at `kinode/target/packages.zip`. +# The compiled binary will be at `kinode/target/debug/kinode`. +# OPTIONAL: --release flag (slower build; faster runtime; binary at `kinode/target/release/kinode`). -cargo +nightly build -p kinode +cargo run -p build_packages +cargo build -p kinode ``` ## Security Status diff --git a/scripts/build-release.py b/scripts/build-release.py index 4fd9fa09..8170dc05 100755 --- a/scripts/build-release.py +++ b/scripts/build-release.py @@ -27,14 +27,39 @@ def build_and_move(feature, tmp_dir, architecture, os_name): zip_prefix = f"kinode-{architecture}-{os_name}" release_env = os.environ.copy() - release_env["CARGO_PROFILE_RELEASE_LTO"] = f"fat" - release_env["CARGO_PROFILE_RELEASE_CODEGEN_UNITS"] = f"1" - release_env["CARGO_PROFILE_RELEASE_STRIP"] = f"symbols" + release_env["CARGO_PROFILE_RELEASE_LTO"] = "fat" + release_env["CARGO_PROFILE_RELEASE_CODEGEN_UNITS"] = "1" + release_env["CARGO_PROFILE_RELEASE_STRIP"] = "symbols" if feature: - subprocess.run(["cargo", "+nightly", "build", "--release", "-p", "kinode", "--features", feature], check=True, env=release_env) + release_env["PATH_TO_PACKAGES_ZIP"] = f"../target/packages-{feature}.zip" + subprocess.run( + ["cargo", "run", "-p", "build_packages", "--", "--features", feature], + check=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + subprocess.run( + ["cargo", "build", "--release", "-p", "kinode", "--features", feature], + check=True, + env=release_env, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) zip_name = f"{zip_prefix}-{feature}.zip" else: - subprocess.run(["cargo", "+nightly", "build", "--release", "-p", "kinode"], check=True, env=release_env) + subprocess.run( + ["cargo", "run", "-p", "build_packages"], + check=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + subprocess.run( + ["cargo", "build", "--release", "-p", "kinode"], + check=True, + env=release_env, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) zip_name = f"{zip_prefix}.zip" # Move and rename the binary @@ -74,4 +99,3 @@ def main(): if __name__ == "__main__": main() - diff --git a/scripts/build_packages/src/main.rs b/scripts/build_packages/src/main.rs index bf0c8974..c5edbc3d 100644 --- a/scripts/build_packages/src/main.rs +++ b/scripts/build_packages/src/main.rs @@ -84,6 +84,12 @@ fn main() -> anyhow::Result<()> { .help("Skip building the frontend") .action(clap::ArgAction::SetTrue), ) + .arg( + Arg::new("OUTPUT_FILENAME") + .long("output-filename") + .help("Set output filename (default: packages-{features}.zip)") + .action(clap::ArgAction::Set), + ) .get_matches(); // kinode/target/debug/build_package @@ -126,12 +132,13 @@ fn main() -> anyhow::Result<()> { } } - let features = 
matches + let mut features = matches .get_many::("FEATURES") .unwrap_or_default() .map(|s| s.to_owned()) - .collect::>() - .join(","); + .collect::>(); + features.sort(); + let features = features.join(","); let results: Vec)>> = fs::read_dir(&packages_dir)? .filter_map(|entry| { @@ -182,7 +189,17 @@ fn main() -> anyhow::Result<()> { let file_to_metadata_path = target_packages_dir.join("file_to_metadata.json"); fs::write(&file_to_metadata_path, file_to_metadata)?; - let package_zip_path = target_dir.join("packages.zip"); + let package_zip_file_name = match matches.get_one::("OUTPUT_FILENAME") { + Some(filename) => filename.to_string(), + None => { + if features.is_empty() { + "packages.zip".to_string() + } else { + format!("packages-{features}.zip") + } + } + }; + let package_zip_path = target_dir.join(package_zip_file_name); let package_zip_contents = zip_directory(&target_packages_dir)?; fs::write(package_zip_path, package_zip_contents)?; From 821b7f9993bafe50815fd34484ee26c64e19f28d Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Tue, 8 Oct 2024 11:49:05 -0700 Subject: [PATCH 09/14] update README re: build --- README.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 157d2eb0..84a8bafd 100644 --- a/README.md +++ b/README.md @@ -28,9 +28,9 @@ On certain operating systems, you may need to install these dependencies if they git clone git@github.com:kinode-dao/kinode.git -# Get some stuff so we can build Wasm. +# Install Rust and some `cargo` tools so we can build the runtime and Wasm. -cd kinode +curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh cargo install wasm-tools rustup install nightly rustup target add wasm32-wasip1 --toolchain nightly @@ -38,14 +38,14 @@ cargo install cargo-wasi # Install NPM so we can build frontends for "distro" packages. # https://docs.npmjs.com/downloading-and-installing-node-js-and-npm -# If you want to skip this step, run cargo build with the environment variable SKIP_BUILD_FRONTEND=true +# If you want to skip this step, build the packages with `cargo run -p build_packages -- --skip-build-frontend` to neglect building the frontends -# Build the "distro" Wasm modules. -# Then, build the runtime. +# Build the "distro" Wasm modules, then, build the runtime. # The compiled packages will be at `kinode/target/packages.zip`. # The compiled binary will be at `kinode/target/debug/kinode`. # OPTIONAL: --release flag (slower build; faster runtime; binary at `kinode/target/release/kinode`). +cd kinode cargo run -p build_packages cargo build -p kinode ``` From 29c2afcdfc26fe856d045088f123dbdf93d60d35 Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Tue, 8 Oct 2024 14:02:47 -0700 Subject: [PATCH 10/14] simplify & improve docker build cleanup --- Dockerfile.buildruntime | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile.buildruntime b/Dockerfile.buildruntime index 3b4bdd31..91e1c902 100644 --- a/Dockerfile.buildruntime +++ b/Dockerfile.buildruntime @@ -11,4 +11,4 @@ WORKDIR /input # Set the default command to run the build script # TODO: once build is idempotent, remove the `rm -rf` line -CMD ["/bin/bash", "-c", ". ~/.bashrc && . ~/.cargo/env && . 
$NVM_DIR/nvm.sh && rm -rf target/ kinode/packages/*/pkg/*wasm kinode/packages/*/*/target/ kinode/packages/*/pkg/api.zip kinode/packages/*/*/wit kinode/packages/app_store/pkg/ui kinode/packages/homepage/pkg/ui kinode/src/register-ui/build && ./scripts/build-release.py && cp -r /tmp/kinode-release/* /output && chmod 664 /output/* && rm -rf target/ kinode/packages/*/pkg/*wasm kinode/packages/*/*/target/ kinode/packages/*/pkg/api.zip kinode/packages/*/*/wit kinode/packages/app_store/pkg/ui kinode/packages/homepage/pkg/ui kinode/src/register-ui/build"] +CMD ["/bin/bash", "-c", ". ~/.bashrc && . ~/.cargo/env && . $NVM_DIR/nvm.sh && ./scripts/build-release.py && cp -r /tmp/kinode-release/* /output && chmod 664 /output/* && find . -user root -print0 2>/dev/null | xargs -0 rm -rf"] From 82a60c8181dff41325f1e087c2b0dd67f5656213 Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Tue, 8 Oct 2024 15:01:23 -0700 Subject: [PATCH 11/14] build: use `kit` to build FEs --- scripts/build_packages/src/main.rs | 33 +++++------------------------- 1 file changed, 5 insertions(+), 28 deletions(-) diff --git a/scripts/build_packages/src/main.rs b/scripts/build_packages/src/main.rs index c5edbc3d..20a63099 100644 --- a/scripts/build_packages/src/main.rs +++ b/scripts/build_packages/src/main.rs @@ -41,13 +41,14 @@ fn zip_directory(dir_path: &Path) -> anyhow::Result> { fn build_and_zip_package( entry_path: PathBuf, parent_pkg_path: &str, + skip_frontend: bool, features: &str, ) -> anyhow::Result<(PathBuf, String, Vec)> { let rt = tokio::runtime::Runtime::new().unwrap(); rt.block_on(async { kit::build::execute( &entry_path, - true, + skip_frontend, false, true, features, @@ -105,33 +106,6 @@ fn main() -> anyhow::Result<()> { let kinode_dir = top_level_dir.join("kinode"); let packages_dir = kinode_dir.join("packages"); - if matches.get_flag("SKIP_FRONTEND") { - println!("skipping frontend builds"); - } else { - // build core frontends - let core_frontends = vec![ - "src/register-ui", - "packages/app_store/ui", - "packages/homepage/ui", - // chess when brought in - ]; - - // for each frontend, execute build.sh - for frontend in core_frontends { - let frontend_path = kinode_dir.join(frontend); - if !frontend_path.exists() { - panic!("couldn't find frontend at {frontend_path:?}"); - } - let status = std::process::Command::new("sh") - .current_dir(frontend_path) - .arg("./build.sh") - .status()?; - if !status.success() { - return Err(anyhow::anyhow!("Failed to build frontend: {}", frontend)); - } - } - } - let mut features = matches .get_many::("FEATURES") .unwrap_or_default() @@ -140,6 +114,8 @@ fn main() -> anyhow::Result<()> { features.sort(); let features = features.join(","); + let skip_frontend = matches.get_flag("SKIP_FRONTEND"); + let results: Vec)>> = fs::read_dir(&packages_dir)? 
.filter_map(|entry| { let entry_path = match entry { @@ -154,6 +130,7 @@ fn main() -> anyhow::Result<()> { Some(build_and_zip_package( entry_path.clone(), child_pkg_path.to_str().unwrap(), + skip_frontend, &features, )) }) From cf71c9eb1f4fa4fca71693ac79c76135900d0a51 Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Tue, 8 Oct 2024 19:29:29 -0700 Subject: [PATCH 12/14] remove old Dockerfile comment --- Dockerfile.buildruntime | 1 - 1 file changed, 1 deletion(-) diff --git a/Dockerfile.buildruntime b/Dockerfile.buildruntime index 91e1c902..6cbf1f35 100644 --- a/Dockerfile.buildruntime +++ b/Dockerfile.buildruntime @@ -10,5 +10,4 @@ ENV NVM_DIR=/root/.nvm \ WORKDIR /input # Set the default command to run the build script -# TODO: once build is idempotent, remove the `rm -rf` line CMD ["/bin/bash", "-c", ". ~/.bashrc && . ~/.cargo/env && . $NVM_DIR/nvm.sh && ./scripts/build-release.py && cp -r /tmp/kinode-release/* /output && chmod 664 /output/* && find . -user root -print0 2>/dev/null | xargs -0 rm -rf"] From 9532e381e5141e8c0fcc4021da9c6ea8564e9ab8 Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Tue, 8 Oct 2024 19:32:22 -0700 Subject: [PATCH 13/14] build: add packages.zip hash to runtime --- Cargo.lock | 41 ++++++++++++++++++++++++++-- kinode/Cargo.toml | 1 + kinode/build.rs | 44 ++++++++++++++++++++++++++++-- kinode/src/main.rs | 11 ++++++-- scripts/build-release.py | 17 ++++-------- scripts/build_packages/Cargo.toml | 2 +- scripts/build_packages/src/main.rs | 1 + 7 files changed, 98 insertions(+), 19 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0c764f2d..05c626d0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1409,7 +1409,7 @@ dependencies = [ "anyhow", "clap", "fs-err", - "kit", + "kit 0.7.7", "serde_json", "tokio", "walkdir", @@ -3784,6 +3784,43 @@ dependencies = [ "zip 0.6.6", ] +[[package]] +name = "kit" +version = "0.7.7" +source = "git+https://github.com/kinode-dao/kit?rev=9c94b4b#9c94b4bd3f2a9dc2eabb2da9bc2ef5e6eb07af9d" +dependencies = [ + "alloy 0.1.4", + "alloy-sol-macro", + "alloy-sol-types", + "anyhow", + "base64 0.21.7", + "cargo_metadata", + "clap", + "color-eyre", + "dirs 5.0.1", + "fs-err", + "git2", + "hex", + "kinode_process_lib 0.9.2", + "nix 0.27.1", + "regex", + "reqwest", + "rpassword", + "semver 1.0.23", + "serde", + "serde_json", + "sha2", + "tokio", + "toml", + "tracing", + "tracing-appender", + "tracing-error", + "tracing-subscriber", + "walkdir", + "wit-bindgen", + "zip 0.6.6", +] + [[package]] name = "kns_indexer" version = "0.2.0" @@ -3823,7 +3860,7 @@ name = "lib" version = "0.9.5" dependencies = [ "alloy 0.2.1", - "kit", + "kit 0.7.6", "lazy_static", "rand 0.8.5", "ring", diff --git a/kinode/Cargo.toml b/kinode/Cargo.toml index 7c481238..998bf4f9 100644 --- a/kinode/Cargo.toml +++ b/kinode/Cargo.toml @@ -14,6 +14,7 @@ path = "src/main.rs" [build-dependencies] anyhow = "1.0.71" +sha2 = "0.10.8" [features] simulation-mode = [] diff --git a/kinode/build.rs b/kinode/build.rs index 5ac4a90e..d2faa444 100644 --- a/kinode/build.rs +++ b/kinode/build.rs @@ -1,4 +1,7 @@ -use std::path::PathBuf; +use std::io::Read; +use std::path::{Path, PathBuf}; + +use sha2::Digest; const CANONICAL_PACKAGES_ZIP_PATH: &str = "../target/packages.zip"; @@ -8,6 +11,23 @@ macro_rules! 
p {
     }
 }
 
+fn compute_hash(file_path: &Path) -> anyhow::Result<String> {
+    let input_file = std::fs::File::open(file_path)?;
+    let mut reader = std::io::BufReader::new(input_file);
+    let mut hasher = sha2::Sha256::new();
+    let mut buffer = [0; 1024]; // buffer for chunks of the file
+
+    loop {
+        let count = reader.read(&mut buffer)?;
+        if count == 0 {
+            break;
+        }
+        hasher.update(&buffer[..count]);
+    }
+
+    Ok(format!("{:x}", hasher.finalize()))
+}
+
 fn main() -> anyhow::Result<()> {
     let path_to_packages_zip = match std::env::var("PATH_TO_PACKAGES_ZIP") {
         Ok(env_var) => env_var,
@@ -31,7 +51,24 @@ fn main() -> anyhow::Result<()> {
     let path_to_packages_zip_path = PathBuf::from(&path_to_packages_zip).canonicalize()?;
     let canonical_packages_zip_path = PathBuf::from(CANONICAL_PACKAGES_ZIP_PATH).canonicalize()?;
     if path_to_packages_zip_path != canonical_packages_zip_path {
-        std::fs::copy(path_to_packages_zip_path, CANONICAL_PACKAGES_ZIP_PATH)?;
+        std::fs::copy(&path_to_packages_zip_path, &canonical_packages_zip_path)?;
+    }
+
+    // build core frontends
+    let pwd = std::env::current_dir()?;
+    let core_frontends = vec![
+        "src/register-ui",
+    ];
+
+    // for each frontend, execute build.sh
+    for frontend in core_frontends {
+        let status = std::process::Command::new("sh")
+            .current_dir(pwd.join(frontend))
+            .arg("./build.sh")
+            .status()?;
+        if !status.success() {
+            return Err(anyhow::anyhow!("Failed to build frontend: {}", frontend));
+        }
     }
 
     let version = if let Ok(version) = std::env::var("DOCKER_BUILD_IMAGE_VERSION") {
@@ -42,5 +79,8 @@ fn main() -> anyhow::Result<()> {
     };
     println!("cargo:rustc-env=DOCKER_BUILD_IMAGE_VERSION={version}");
 
+    let packages_zip_hash = compute_hash(&canonical_packages_zip_path)?;
+    println!("cargo:rustc-env=PACKAGES_ZIP_HASH={packages_zip_hash}");
+
     Ok(())
 }
diff --git a/kinode/src/main.rs b/kinode/src/main.rs
index ade4fe50..976da07f 100644
--- a/kinode/src/main.rs
+++ b/kinode/src/main.rs
@@ -58,10 +58,17 @@ pub const MULTICALL_ADDRESS: &str = "0xcA11bde05977b3631167028862bE2a173976CA11"
 
 #[tokio::main]
 async fn main() {
+    // embed values in binary for inspection without running & print on boot
+    // e.g., to inspect without running, use
+    // ```bash
+    // strings kinode | grep DOCKER_BUILD_IMAGE_VERSION
+    // ```
     println!(
-        "\nDOCKER_BUILD_IMAGE_VERSION: {}\n",
-        env!("DOCKER_BUILD_IMAGE_VERSION")
+        "\nDOCKER_BUILD_IMAGE_VERSION: {}\nPACKAGES_ZIP_HASH: {}\n",
+        env!("DOCKER_BUILD_IMAGE_VERSION"),
+        env!("PACKAGES_ZIP_HASH"),
     );
+
     let app = build_command();
     let matches = app.get_matches();
 
diff --git a/scripts/build-release.py b/scripts/build-release.py
index 8170dc05..bd36ac1e 100755
--- a/scripts/build-release.py
+++ b/scripts/build-release.py
@@ -35,30 +35,23 @@ def build_and_move(feature, tmp_dir, architecture, os_name):
         subprocess.run(
             ["cargo", "run", "-p", "build_packages", "--", "--features", feature],
             check=True,
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE,
+            #stdout=subprocess.PIPE,
+            #stderr=subprocess.PIPE,
         )
         subprocess.run(
             ["cargo", "build", "--release", "-p", "kinode", "--features", feature],
             check=True,
             env=release_env,
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE,
+            #stdout=subprocess.PIPE,
+            #stderr=subprocess.PIPE,
         )
         zip_name = f"{zip_prefix}-{feature}.zip"
     else:
-        subprocess.run(
-            ["cargo", "run", "-p", "build_packages"],
-            check=True,
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE,
-        )
+        subprocess.run(["cargo", "run", "-p", "build_packages"], check=True)
         subprocess.run(
             ["cargo", "build", "--release", "-p", "kinode"],
             check=True,
env=release_env, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, ) zip_name = f"{zip_prefix}.zip" diff --git a/scripts/build_packages/Cargo.toml b/scripts/build_packages/Cargo.toml index fe507b3e..ccb9b122 100644 --- a/scripts/build_packages/Cargo.toml +++ b/scripts/build_packages/Cargo.toml @@ -7,7 +7,7 @@ edition = "2021" anyhow = "1.0.71" clap = "4" fs-err = "2.11" -kit = { git = "https://github.com/kinode-dao/kit", tag = "v0.7.6" } +kit = { git = "https://github.com/kinode-dao/kit", rev = "9c94b4b" } serde_json = "1" tokio = "1.28" walkdir = "2.4" diff --git a/scripts/build_packages/src/main.rs b/scripts/build_packages/src/main.rs index 20a63099..02f15d77 100644 --- a/scripts/build_packages/src/main.rs +++ b/scripts/build_packages/src/main.rs @@ -60,6 +60,7 @@ fn build_and_zip_package( false, false, false, + false, ) .await .map_err(|e| anyhow::anyhow!("{:?}", e))?; From d194fccb27932f7af90e22316bcb69dc738b6bea Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 9 Oct 2024 02:53:03 +0000 Subject: [PATCH 14/14] Format Rust code using rustfmt --- kinode/build.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/kinode/build.rs b/kinode/build.rs index d2faa444..a3ec15df 100644 --- a/kinode/build.rs +++ b/kinode/build.rs @@ -56,9 +56,7 @@ fn main() -> anyhow::Result<()> { // build core frontends let pwd = std::env::current_dir()?; - let core_frontends = vec![ - "src/register-ui", - ]; + let core_frontends = vec!["src/register-ui"]; // for each frontend, execute build.sh for frontend in core_frontends {
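
Note on PATCH 13: `kinode/build.rs` streams `target/packages.zip` through SHA-256 in 1 KiB chunks and embeds the digest in the binary via `cargo:rustc-env=PACKAGES_ZIP_HASH=...`, and `kinode/src/main.rs` prints it on boot next to `DOCKER_BUILD_IMAGE_VERSION`. The sketch below is not part of the patch series; it is a minimal standalone check, assuming only the `sha2` crate, that recomputes the digest of a `packages.zip` on disk so the result can be compared with the value a node prints on boot. The path in `main` is a placeholder.

```rust
// Standalone sketch (not from the patches): recompute the SHA-256 of a
// packages.zip the same way the compute_hash added to kinode/build.rs does,
// so the output can be compared against the PACKAGES_ZIP_HASH printed on boot.
use sha2::{Digest, Sha256};
use std::io::Read;
use std::path::Path;

fn sha256_hex(path: &Path) -> std::io::Result<String> {
    let file = std::fs::File::open(path)?;
    let mut reader = std::io::BufReader::new(file);
    let mut hasher = Sha256::new();
    let mut buffer = [0u8; 1024]; // hash in 1 KiB chunks, mirroring build.rs

    loop {
        let count = reader.read(&mut buffer)?;
        if count == 0 {
            break;
        }
        hasher.update(&buffer[..count]);
    }

    Ok(format!("{:x}", hasher.finalize()))
}

fn main() -> std::io::Result<()> {
    // Placeholder path: point this at the packages.zip used for the build.
    let hash = sha256_hex(Path::new("target/packages.zip"))?;
    println!("sha256(target/packages.zip) = {hash}");
    Ok(())
}
```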