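// Build script: builds and zips the packages under `packages/`, and generates
// `src/bootstrapped_processes.rs`, which embeds each package's metadata.json
// and zip archive via `include_bytes!`.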
use rayon::prelude::*;
use std::{
    collections::HashSet,
    fs::{self, File},
    io::{Cursor, Read, Write},
    path::{Path, PathBuf},
};
use zip::write::FileOptions;

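/// Collect the Cargo features enabled for this build from the `CARGO_FEATURE_*`
/// environment variables that Cargo exposes to build scripts, lowercased and with
/// underscores replaced by hyphens, concatenated into a single string.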
fn get_features() -> String {
    let mut features = "".to_string();
    for (key, _) in std::env::vars() {
        if key.starts_with("CARGO_FEATURE_") {
            let feature = key
                .trim_start_matches("CARGO_FEATURE_")
                .to_lowercase()
                .replace("_", "-");
            features.push_str(&feature);
            //println!("cargo:rustc-cfg=feature=\"{}\"", feature);
            //println!("- {}", feature);
        }
    }
    features
}

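/// Print `cargo:rerun-if-changed` directives for the paths we care about.
/// If `dir` itself appears in `rerun_files`, every entry inside it is emitted;
/// otherwise the directory is walked recursively and a directive is emitted
/// only for files whose names appear in `rerun_files`.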
fn output_reruns(dir: &Path, rerun_files: &HashSet<String>) {
    if rerun_files.contains(dir.to_str().unwrap()) {
        // Output for all files in the directory if the directory itself is specified in rerun_files
        if let Ok(entries) = fs::read_dir(dir) {
            for entry in entries.filter_map(|e| e.ok()) {
                let path = entry.path();
                println!("cargo:rerun-if-changed={}", path.display());
            }
        }
    } else {
        // Check files individually
        if let Ok(entries) = fs::read_dir(dir) {
            for entry in entries.filter_map(|e| e.ok()) {
                let path = entry.path();
                if path.is_dir() {
                    // If the entry is a directory, recursively walk it
                    output_reruns(&path, rerun_files);
                } else if let Some(filename) = path.file_name().and_then(|n| n.to_str()) {
                    // Check if the current file is in our list of interesting files
                    if rerun_files.contains(filename) {
                        // If so, print a `cargo:rerun-if-changed=PATH` line for it
                        println!("cargo:rerun-if-changed={}", path.display());
                    }
                }
            }
        }
    }
}

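/// Build a single package with `kit::build::execute`, then zip the contents of
/// its `pkg/` directory in memory (stored, i.e. uncompressed). Returns the
/// package path, the zip filename, and the zip bytes.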
fn build_and_zip_package(
    entry_path: PathBuf,
    parent_pkg_path: &str,
    features: &str,
) -> anyhow::Result<(String, String, Vec<u8>)> {
    let rt = tokio::runtime::Runtime::new().unwrap();
    rt.block_on(async {
        kit::build::execute(&entry_path, true, false, true, features, None, None) // TODO
            .await
            .map_err(|e| anyhow::anyhow!("{:?}", e))?;

        let mut writer = Cursor::new(Vec::new());
        let options = FileOptions::default()
            .compression_method(zip::CompressionMethod::Stored)
            .unix_permissions(0o755);
        {
            let mut zip = zip::ZipWriter::new(&mut writer);

            for sub_entry in walkdir::WalkDir::new(parent_pkg_path) {
                let sub_entry = sub_entry?;
                let path = sub_entry.path();
                let name = path.strip_prefix(Path::new(parent_pkg_path))?;

                if path.is_file() {
                    zip.start_file(name.to_string_lossy(), options)?;
                    let mut file = File::open(path)?;
                    let mut buffer = Vec::new();
                    file.read_to_end(&mut buffer)?;
                    zip.write_all(&buffer)?;
                } else if !name.as_os_str().is_empty() {
                    zip.add_directory(name.to_string_lossy(), options)?;
                }
            }
            zip.finish()?;
        }

        let zip_contents = writer.into_inner();
        let zip_filename = format!("{}.zip", entry_path.file_name().unwrap().to_str().unwrap());
        Ok((entry_path.display().to_string(), zip_filename, zip_contents))
    })
}

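/// Build-script entry point: skipped entirely when `SKIP_BUILD_SCRIPT` is set.
/// Builds every package under `packages/` in parallel, writes each zip into the
/// parent directory's `target/`, and generates `src/bootstrapped_processes.rs`.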
fn main() -> anyhow::Result<()> {
    if std::env::var("SKIP_BUILD_SCRIPT").is_ok() {
        println!("Skipping build script");
        return Ok(());
    }

    let pwd = std::env::current_dir()?;
    let parent_dir = pwd.parent().unwrap();
    let packages_dir = pwd.join("packages");

    let entries: Vec<_> = fs::read_dir(packages_dir)?
        .map(|entry| entry.unwrap().path())
        .collect();

    let rerun_files: HashSet<String> = HashSet::from([
        "Cargo.lock".to_string(),
        "Cargo.toml".to_string(),
        "src/".to_string(),
    ]);
    output_reruns(&parent_dir, &rerun_files);

    let features = get_features();

    // Build all packages in parallel with rayon.
    let results: Vec<anyhow::Result<(String, String, Vec<u8>)>> = entries
        .par_iter()
        .filter_map(|entry_path| {
            let parent_pkg_path = entry_path.join("pkg");
            if !parent_pkg_path.exists() {
                // don't run on, e.g., `.DS_Store`
                return None;
            }
            Some(build_and_zip_package(
                entry_path.clone(),
                parent_pkg_path.to_str().unwrap(),
                &features,
            ))
        })
        .collect();

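    // For reference, the generated `bootstrapped_processes.rs` has this shape
    // (the package name and paths below are illustrative, not real entries):
    //
    //     pub static BOOTSTRAPPED_PROCESSES: &[(&str, &[u8], &[u8])] = &[
    //         ("my_package.zip", include_bytes!("/path/to/my_package/metadata.json"), include_bytes!("/path/to/target/my_package.zip")),
    //     ];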
    // Process results, e.g., write to `bootstrapped_processes.rs`
    // This part remains sequential
    let mut bootstrapped_processes = vec![];
    writeln!(
        bootstrapped_processes,
        "pub static BOOTSTRAPPED_PROCESSES: &[(&str, &[u8], &[u8])] = &["
    )?;

    for result in results {
        match result {
            Ok((entry_path, zip_filename, zip_contents)) => {
                // Further processing, like saving ZIP files and updating bootstrapped_processes
                let metadata_path = format!("{}/metadata.json", entry_path);
                let zip_path = format!("{}/target/{}", parent_dir.display(), zip_filename);
                fs::write(&zip_path, &zip_contents)?;

                writeln!(
                    bootstrapped_processes,
                    " (\"{}\", include_bytes!(\"{}\"), include_bytes!(\"{}\")),",
                    zip_filename, metadata_path, zip_path,
                )?;
            }
            Err(e) => return Err(e),
        }
    }

    writeln!(bootstrapped_processes, "];")?;
    let bootstrapped_processes_path = pwd.join("src/bootstrapped_processes.rs");
    fs::write(&bootstrapped_processes_path, bootstrapped_processes)?;

    Ok(())
}