mirror of
https://github.com/uqbar-dao/nectar.git
synced 2024-11-22 11:22:59 +03:00
build: use rust instead of python (so we can enforce deps)
This commit is contained in:
parent
d8070e3303
commit
00688d96a5
13
Cargo.lock
generated
13
Cargo.lock
generated
@ -1402,6 +1402,19 @@ dependencies = [
|
||||
"zeroize",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "build_package"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
"fs-err",
|
||||
"kit",
|
||||
"tokio",
|
||||
"walkdir",
|
||||
"zip 0.6.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bumpalo"
|
||||
version = "3.16.0"
|
||||
|
@ -26,7 +26,7 @@ members = [
|
||||
"kinode/packages/terminal/help", "kinode/packages/terminal/hi", "kinode/packages/terminal/kfetch",
|
||||
"kinode/packages/terminal/kill", "kinode/packages/terminal/m", "kinode/packages/terminal/top",
|
||||
"kinode/packages/terminal/net_diagnostics", "kinode/packages/terminal/peer", "kinode/packages/terminal/peers",
|
||||
"kinode/packages/tester/tester",
|
||||
"kinode/packages/tester/tester", "scripts/build_package",
|
||||
]
|
||||
default-members = ["lib"]
|
||||
resolver = "2"
|
||||
|
@ -1,146 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import argparse
|
||||
import io
|
||||
import os
|
||||
from pathlib import Path
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import zipfile
|
||||
|
||||
def get_features(args):
    """Return the parsed ``--features`` list as one comma-separated string."""
    return ','.join(args.features)
|
||||
|
||||
def zip_directory(directory_path):
    """Zip the contents of *directory_path* into an in-memory archive.

    Entries are stored relative to *directory_path*, deflate-compressed,
    with their filesystem permission bits preserved and a fixed timestamp
    (2023-06-19) so the archive bytes are reproducible.

    Fix: directories and files are now visited in sorted order.  The
    original relied on os.walk's filesystem-dependent ordering, which made
    otherwise-identical trees produce different archive bytes and defeated
    the fixed-timestamp reproducibility.  (Also renamed the loop variable
    `dir`, which shadowed the builtin.)

    Returns the raw bytes of the zip archive.
    """
    buffer = io.BytesIO()
    with zipfile.ZipFile(buffer, 'w', zipfile.ZIP_DEFLATED) as zip_file:
        for root, dirs, files in os.walk(directory_path):
            # Sorting dirs in place also makes os.walk descend deterministically.
            dirs.sort()
            files.sort()
            # Adding directories explicitly to ensure they are included in the zip
            for dirname in dirs:
                dir_path = os.path.join(root, dirname)
                arcname = os.path.relpath(dir_path, start=directory_path)
                # Create a ZipInfo object for the directory
                zi = zipfile.ZipInfo(arcname + '/')
                permissions = 0o755
                zi.external_attr = permissions << 16 | 0x10  # MS-DOS directory flag
                zi.date_time = (2023, 6, 19, 0, 0, 0)
                zip_file.writestr(zi, '')
            for file in files:
                file_path = os.path.join(root, file)
                arcname = os.path.relpath(file_path, start=directory_path)
                # Preserve the on-disk permission bits in the archive entry.
                st = os.stat(file_path)
                zi = zipfile.ZipInfo(arcname)
                zi.external_attr = st.st_mode << 16
                # Fixed timestamp keeps the archive reproducible.
                zi.date_time = (2023, 6, 19, 0, 0, 0)
                # Read file data
                with open(file_path, 'rb') as f:
                    zip_file.writestr(zi, f.read())
    return buffer.getvalue()
|
||||
|
||||
def build_and_zip_package(entry_path, parent_pkg_path, features):
    """Run ``kit build`` on *entry_path*, then zip *parent_pkg_path*.

    Returns a ``(entry_path, zip_filename, zip_contents)`` tuple.
    Raises Exception when the build subprocess exits non-zero.
    """
    cmd = ['kit', 'build', entry_path, '--no-ui', '--skip-deps-check']
    if features:
        cmd.extend(['--features', features])
    if subprocess.run(cmd, cwd=entry_path).returncode != 0:
        raise Exception(f'Failed to build package at {entry_path}')

    # Archive the pkg/ directory that `kit build` just populated.
    zip_contents = zip_directory(parent_pkg_path)
    zip_filename = f'{os.path.basename(entry_path)}.zip'
    return (str(entry_path), zip_filename, zip_contents)
|
||||
|
||||
def main():
    """Build every core package under kinode/packages and bundle the results.

    Steps:
      1. Unless --skip-build-frontend is given, run each core frontend's
         build.sh (skipping, with a message, any frontend without one).
      2. ``kit build`` + zip each package directory that contains a pkg/ dir.
      3. Write the zips and a generated bootstrapped_processes.rs into
         target/packages, then zip that whole directory into
         target/packages.zip.

    Fixes vs. the original: package directories are iterated in sorted order
    (os.scandir order is filesystem-dependent, which made the generated
    bootstrapped_processes.rs nondeterministic); makedirs uses exist_ok=True
    instead of a check-then-create pair; metadata path uses pathlib like the
    rest of the function instead of a lone os.path.join.
    """
    parser = argparse.ArgumentParser(description='Build and zip Rust packages.')
    parser.add_argument('--features', nargs='*', default=[], help='List of features to compile packages with')
    parser.add_argument('--skip-build-frontend', action='store_true', help='Skip building the frontend')
    args = parser.parse_args()

    # Repo layout relative to this script: <top>/scripts/<this file>
    script_path = Path(os.path.abspath(__file__))
    top_level_dir = script_path.parent.parent
    kinode_dir = top_level_dir / 'kinode'
    packages_dir = kinode_dir / 'packages'

    if args.skip_build_frontend:
        print("skipping frontend builds")
    else:
        # Build core frontends
        core_frontends = [
            'src/register-ui',
            'packages/app_store/ui',
            'packages/homepage/ui',
            # chess when brought in
        ]

        # For each frontend, execute build.sh
        for frontend in core_frontends:
            frontend_path = kinode_dir / frontend
            build_script = frontend_path / 'build.sh'
            if not build_script.exists():
                print(f'Build script not found for frontend: {frontend} at {build_script}')
                continue
            result = subprocess.run(['sh', './build.sh'], cwd=frontend_path)
            if result.returncode != 0:
                raise Exception(f'Failed to build frontend: {frontend}')

    features = get_features(args)

    results = []
    # Sorted so the generated bootstrapped_processes.rs is deterministic.
    for entry in sorted(os.scandir(packages_dir), key=lambda e: e.name):
        if not entry.is_dir():
            continue
        entry_path = Path(entry.path)
        child_pkg_path = entry_path / 'pkg'
        if not child_pkg_path.exists():
            # don't run on, e.g., `.DS_Store`
            continue
        results.append(build_and_zip_package(str(entry_path), str(child_pkg_path), features))

    # Process results
    bootstrapped_processes = []
    bootstrapped_processes.append('pub static BOOTSTRAPPED_PROCESSES: &[(&str, &[u8], &[u8])] = &[')

    target_dir = top_level_dir / 'target'
    target_packages_dir = target_dir / 'packages'
    # exist_ok avoids the check-then-create race of an exists()/makedirs pair.
    os.makedirs(target_packages_dir, exist_ok=True)

    for (entry_path, zip_filename, zip_contents) in results:
        # Save zip_contents to zip_path
        zip_path = target_packages_dir / zip_filename
        with open(zip_path, 'wb') as f:
            f.write(zip_contents)

        metadata_path = Path(entry_path) / 'metadata.json'

        # Update bootstrapped_processes
        bootstrapped_processes.append(
            f'    ("{zip_filename}", include_bytes!("{metadata_path}"), include_bytes!("{zip_path}")),'
        )

    bootstrapped_processes.append('];')

    bootstrapped_processes_path = target_packages_dir / 'bootstrapped_processes.rs'
    with open(bootstrapped_processes_path, 'w') as f:
        f.write('\n'.join(bootstrapped_processes))

    zip_contents = zip_directory(target_packages_dir)
    zip_path = target_dir / 'packages.zip'

    with open(zip_path, 'wb') as f:
        f.write(zip_contents)
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
13
scripts/build_package/Cargo.toml
Normal file
13
scripts/build_package/Cargo.toml
Normal file
@ -0,0 +1,13 @@
|
||||
[package]
|
||||
name = "build_package"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1.0.71"
|
||||
clap = "4"
|
||||
fs-err = "2.11"
|
||||
kit = { git = "https://github.com/kinode-dao/kit", tag = "v0.7.6" }
|
||||
tokio = "1.28"
|
||||
walkdir = "2.4"
|
||||
zip = "0.6"
|
215
scripts/build_package/src/main.rs
Normal file
215
scripts/build_package/src/main.rs
Normal file
@ -0,0 +1,215 @@
|
||||
use std::{
|
||||
io::{Cursor, Read, Write},
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use clap::{Arg, Command};
|
||||
use fs_err as fs;
|
||||
use zip::write::FileOptions;
|
||||
|
||||
fn zip_directory(dir_path: &Path) -> anyhow::Result<Vec<u8>> {
|
||||
let mut writer = Cursor::new(Vec::new());
|
||||
let options = FileOptions::default()
|
||||
.compression_method(zip::CompressionMethod::Deflated)
|
||||
.unix_permissions(0o755)
|
||||
.last_modified_time(zip::DateTime::from_date_and_time(2023, 6, 19, 0, 0, 0).unwrap());
|
||||
{
|
||||
let mut zip = zip::ZipWriter::new(&mut writer);
|
||||
|
||||
for sub_entry in walkdir::WalkDir::new(dir_path) {
|
||||
let sub_entry = sub_entry?;
|
||||
let path = sub_entry.path();
|
||||
let name = path.strip_prefix(dir_path)?;
|
||||
|
||||
if path.is_file() {
|
||||
zip.start_file(name.to_string_lossy(), options)?;
|
||||
let mut file = fs::File::open(path)?;
|
||||
let mut buffer = Vec::new();
|
||||
file.read_to_end(&mut buffer)?;
|
||||
zip.write_all(&buffer)?;
|
||||
} else if !name.as_os_str().is_empty() {
|
||||
zip.add_directory(name.to_string_lossy(), options)?;
|
||||
}
|
||||
}
|
||||
zip.finish()?;
|
||||
}
|
||||
|
||||
let zip_contents = writer.into_inner();
|
||||
Ok(zip_contents)
|
||||
}
|
||||
|
||||
fn build_and_zip_package(
|
||||
entry_path: PathBuf,
|
||||
parent_pkg_path: &str,
|
||||
features: &str,
|
||||
) -> anyhow::Result<(PathBuf, String, Vec<u8>)> {
|
||||
let rt = tokio::runtime::Runtime::new().unwrap();
|
||||
rt.block_on(async {
|
||||
kit::build::execute(
|
||||
&entry_path,
|
||||
true,
|
||||
false,
|
||||
true,
|
||||
features,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
vec![],
|
||||
vec![],
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
)
|
||||
.await
|
||||
.map_err(|e| anyhow::anyhow!("{:?}", e))?;
|
||||
|
||||
let zip_contents = zip_directory(&Path::new(parent_pkg_path))?;
|
||||
let zip_filename = format!("{}.zip", entry_path.file_name().unwrap().to_str().unwrap());
|
||||
Ok((entry_path, zip_filename, zip_contents))
|
||||
})
|
||||
}
|
||||
|
||||
fn main() -> anyhow::Result<()> {
|
||||
let matches = Command::new("build_package")
|
||||
.about("Build the core Kinode packages.")
|
||||
.arg(Arg::new("FEATURES")
|
||||
.long("features")
|
||||
.help("List of features to compile packages with")
|
||||
.action(clap::ArgAction::Append))
|
||||
.arg(Arg::new("SKIP_FRONTEND")
|
||||
.long("skip-build-frontend")
|
||||
.help("Skip building the frontend")
|
||||
.action(clap::ArgAction::SetTrue))
|
||||
.get_matches();
|
||||
|
||||
|
||||
println!("a");
|
||||
// kinode/target/debug/build_package
|
||||
let current_exe_dir = std::env::current_exe() // build_package
|
||||
.unwrap();
|
||||
let top_level_dir = current_exe_dir
|
||||
.parent() // debug/
|
||||
.unwrap()
|
||||
.parent() // target/
|
||||
.unwrap()
|
||||
.parent() // kinode/
|
||||
.unwrap();
|
||||
let kinode_dir = top_level_dir.join("kinode");
|
||||
let packages_dir = kinode_dir.join("packages");
|
||||
|
||||
println!("{current_exe_dir:?} {top_level_dir:?} {kinode_dir:?} {packages_dir:?}");
|
||||
|
||||
println!("b");
|
||||
if matches.get_flag("SKIP_FRONTEND") {
|
||||
println!("skipping frontend builds");
|
||||
} else {
|
||||
// build core frontends
|
||||
let core_frontends = vec![
|
||||
"src/register-ui",
|
||||
"packages/app_store/ui",
|
||||
"packages/homepage/ui",
|
||||
// chess when brought in
|
||||
];
|
||||
|
||||
// for each frontend, execute build.sh
|
||||
for frontend in core_frontends {
|
||||
let frontend_path = kinode_dir.join(frontend);
|
||||
if !frontend_path.exists() {
|
||||
panic!("couldn't find frontend at {frontend_path:?}");
|
||||
}
|
||||
let status = std::process::Command::new("sh")
|
||||
.current_dir(frontend_path)
|
||||
.arg("./build.sh")
|
||||
.status()?;
|
||||
if !status.success() {
|
||||
return Err(anyhow::anyhow!("Failed to build frontend: {}", frontend));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
println!("c");
|
||||
let features = matches.get_many::<String>("FEATURES")
|
||||
.unwrap_or_default()
|
||||
.map(|s| s.to_owned())
|
||||
.collect::<Vec<String>>()
|
||||
.join(",");
|
||||
|
||||
println!("d");
|
||||
let results: Vec<anyhow::Result<(PathBuf, String, Vec<u8>)>> = fs::read_dir(&packages_dir)?
|
||||
.filter_map(|entry| {
|
||||
let entry_path = match entry {
|
||||
Ok(e) => e.path(),
|
||||
Err(_) => return None,
|
||||
};
|
||||
let child_pkg_path = entry_path.join("pkg");
|
||||
if !child_pkg_path.exists() {
|
||||
// don't run on, e.g., `.DS_Store`
|
||||
return None;
|
||||
}
|
||||
Some(build_and_zip_package(
|
||||
entry_path.clone(),
|
||||
child_pkg_path.to_str().unwrap(),
|
||||
&features,
|
||||
))
|
||||
})
|
||||
.collect();
|
||||
|
||||
println!("e");
|
||||
// Process results, e.g., write to `bootstrapped_processes.rs`
|
||||
// This part remains sequential
|
||||
let mut bootstrapped_processes = vec![];
|
||||
writeln!(
|
||||
bootstrapped_processes,
|
||||
"pub static BOOTSTRAPPED_PROCESSES: &[(&str, &[u8], &[u8])] = &["
|
||||
)?;
|
||||
|
||||
println!("f");
|
||||
let target_dir = top_level_dir.join("target");
|
||||
let target_packages_dir = target_dir.join("packages");
|
||||
let target_metadatas_dir = target_dir.join("metadatas");
|
||||
for path in [&target_packages_dir, &target_metadatas_dir] {
|
||||
if !path.exists() {
|
||||
fs::create_dir_all(path)?;
|
||||
}
|
||||
}
|
||||
|
||||
println!("g");
|
||||
for result in results {
|
||||
match result {
|
||||
Ok((entry_path, zip_filename, zip_contents)) => {
|
||||
let metadata_path = entry_path.join("metadata.json");
|
||||
let metadata_file_name = {
|
||||
let metadata_file_stem = entry_path
|
||||
.file_stem()
|
||||
.and_then(|s| s.to_str())
|
||||
.unwrap();
|
||||
format!("{metadata_file_stem}.json")
|
||||
};
|
||||
let new_metadata_path = target_metadatas_dir.join(metadata_file_name);
|
||||
fs::copy(&metadata_path, &new_metadata_path)?;
|
||||
let zip_path = target_packages_dir.join(&zip_filename);
|
||||
fs::write(&zip_path, &zip_contents)?;
|
||||
|
||||
writeln!(
|
||||
bootstrapped_processes,
|
||||
" (\"{}\", include_bytes!(\"{}\"), include_bytes!(\"{}\"),),",
|
||||
zip_filename, new_metadata_path.display(), zip_path.display(),
|
||||
)?;
|
||||
}
|
||||
Err(e) => return Err(e),
|
||||
}
|
||||
}
|
||||
|
||||
println!("h");
|
||||
writeln!(bootstrapped_processes, "];")?;
|
||||
let bootstrapped_processes_path = target_packages_dir.join("bootstrapped_processes.rs");
|
||||
fs::write(&bootstrapped_processes_path, bootstrapped_processes)?;
|
||||
|
||||
println!("i");
|
||||
let package_zip_path = target_dir.join("packages.zip");
|
||||
let package_zip_contents = zip_directory(&target_packages_dir)?;
|
||||
fs::write(package_zip_path, package_zip_contents)?;
|
||||
|
||||
println!("j");
|
||||
Ok(())
|
||||
}
|
Loading…
Reference in New Issue
Block a user