mirror of https://github.com/uqbar-dao/nectar.git
synced 2024-11-22 03:04:35 +03:00
build: use new build system (nearly 100%)
This commit is contained in:
parent c06eb90908
commit 2025b8c26b
Cargo.lock (generated): 41 changed lines
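In brief (a condensed, non-verbatim sketch assembled from the kinode/build.rs hunk further down): build.rs no longer builds and zips packages itself. It either accepts a prebuilt archive via the PATH_TO_PACKAGES_ZIP environment variable or runs the new scripts/build_package crate, then expects the result at ../target/packages.zip.

// Condensed sketch of the new kinode/build.rs flow; see the full hunk below.
// Error handling and the frontend/rerun logic are omitted here.
const CANONICAL_PACKAGES_ZIP_PATH: &str = "../target/packages.zip";

fn main() -> anyhow::Result<()> {
    let path_to_packages_zip = match std::env::var("PATH_TO_PACKAGES_ZIP") {
        Ok(env_var) => env_var,
        Err(_) => {
            // No prebuilt archive supplied: run the packaging crate, which is
            // expected to write ../target/packages.zip.
            let status = std::process::Command::new("cargo")
                .arg("run")
                .current_dir("../scripts/build_package")
                .status()?;
            if !status.success() {
                return Err(anyhow::anyhow!("Failed to build packages."));
            }
            CANONICAL_PACKAGES_ZIP_PATH.to_string()
        }
    };
    if !std::path::Path::new(&path_to_packages_zip).exists() {
        return Err(anyhow::anyhow!(
            "Path to packages {path_to_packages_zip} does not exist."
        ));
    }
    Ok(())
}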
@@ -1410,6 +1410,7 @@ dependencies = [
 "clap",
 "fs-err",
 "kit",
 "serde_json",
 "tokio",
 "walkdir",
 "zip 0.6.6",
@@ -2641,18 +2642,6 @@ version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d"

[[package]]
name = "filetime"
version = "0.2.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf401df4a4e3872c4fe8151134cf483738e74b67fc934d6532c882b3d24a4550"
dependencies = [
 "cfg-if",
 "libc",
 "libredox",
 "windows-sys 0.59.0",
]

[[package]]
name = "fixed-hash"
version = "0.8.0"
@@ -3667,7 +3656,6 @@ dependencies = [
 "clap",
 "crossterm",
 "dashmap 5.5.3",
 "flate2",
 "futures",
 "generic-array",
 "hex",
@@ -3675,7 +3663,6 @@ dependencies = [
 "http 1.1.0",
 "indexmap",
 "jwt",
 "kit",
 "lazy_static",
 "lib",
 "nohash-hasher",
@@ -3695,19 +3682,16 @@ dependencies = [
 "snow",
 "socket2 0.5.7",
 "static_dir",
 "tar",
 "thiserror",
 "tokio",
 "tokio-tungstenite 0.21.0",
 "unicode-segmentation",
 "unicode-width",
 "url",
 "walkdir",
 "warp",
 "wasi-common",
 "wasmtime",
 "wasmtime-wasi",
 "zip 0.6.6",
 "zip 1.1.4",
]

@@ -3895,7 +3879,6 @@ checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
dependencies = [
 "bitflags 2.6.0",
 "libc",
 "redox_syscall",
]

[[package]]
@@ -5953,17 +5936,6 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"

[[package]]
name = "tar"
version = "0.4.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb797dad5fb5b76fcf519e702f4a589483b5ef06567f160c392832c1f5e44909"
dependencies = [
 "filetime",
 "libc",
 "xattr",
]

[[package]]
name = "target-lexicon"
version = "0.12.16"
@@ -7727,17 +7699,6 @@ dependencies = [
 "tap",
]

[[package]]
name = "xattr"
version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8da84f1a25939b27f6820d92aed108f83ff920fdf11a7b19366c27c4cda81d4f"
dependencies = [
 "libc",
 "linux-raw-sys",
 "rustix",
]

[[package]]
name = "zerocopy"
version = "0.7.35"
@@ -14,12 +14,6 @@ path = "src/main.rs"

[build-dependencies]
anyhow = "1.0.71"
flate2 = "1.0"
kit = { git = "https://github.com/kinode-dao/kit", tag = "v0.7.6" }
tar = "0.4"
tokio = "1.28"
walkdir = "2.4"
zip = "0.6"

[features]
simulation-mode = []
kinode/build.rs: 239 changed lines
@@ -1,227 +1,36 @@
use std::{
    fs::{self, File},
    io::{BufReader, Cursor, Read, Write},
    path::{Path, PathBuf},
};
use std::path::PathBuf;

use flate2::read::GzDecoder;
use tar::Archive;
use zip::write::FileOptions;

macro_rules! p {
    ($($tokens: tt)*) => {
        println!("cargo:warning={}", format!($($tokens)*))
    }
}

/// get cargo features to compile packages with
fn get_features() -> String {
    let mut features = "".to_string();
    for (key, _) in std::env::vars() {
        if key.starts_with("CARGO_FEATURE_") {
            let feature = key
                .trim_start_matches("CARGO_FEATURE_")
                .to_lowercase()
                .replace("_", "-");
            features.push_str(&feature);
        }
    }
    features
}

/// print `cargo:rerun-if-changed=PATH` for each path of interest
fn output_reruns(dir: &Path) {
    // Check files individually
    if let Ok(entries) = fs::read_dir(dir) {
        for entry in entries.filter_map(|e| e.ok()) {
            let path = entry.path();
            if path.is_dir() {
                if let Some(dirname) = path.file_name().and_then(|n| n.to_str()) {
                    if dirname == "ui" || dirname == "target" {
                        // do not prompt a rerun if only UI/build files have changed
                        continue;
                    }
                    // If the entry is a directory not in rerun_files, recursively walk it
                    output_reruns(&path);
                }
            } else {
                if let Some(filename) = path.file_name().and_then(|n| n.to_str()) {
                    if filename.ends_with(".zip") || filename.ends_with(".wasm") {
                        // do not prompt a rerun for compiled outputs
                        continue;
                    }
                    // any other changed file within a package subdir prompts a rerun
                    println!("cargo::rerun-if-changed={}", path.display());
                }
            }
        }
    }
}

fn _untar_gz_file(path: &Path, dest: &Path) -> std::io::Result<()> {
    // Open the .tar.gz file
    let tar_gz = File::open(path)?;
    let tar_gz_reader = BufReader::new(tar_gz);

    // Decode the gzip layer
    let tar = GzDecoder::new(tar_gz_reader);

    // Create a new archive from the tar file
    let mut archive = Archive::new(tar);

    // Unpack the archive into the specified destination directory
    archive.unpack(dest)?;

    Ok(())
}

fn build_and_zip_package(
    entry_path: PathBuf,
    parent_pkg_path: &str,
    features: &str,
) -> anyhow::Result<(String, String, Vec<u8>)> {
    let rt = tokio::runtime::Runtime::new().unwrap();
    rt.block_on(async {
        kit::build::execute(
            &entry_path,
            true,
            false,
            true,
            features,
            None,
            None,
            None,
            vec![],
            vec![],
            false,
            false,
            false,
        )
        .await
        .map_err(|e| anyhow::anyhow!("{:?}", e))?;

        let mut writer = Cursor::new(Vec::new());
        let options = FileOptions::default()
            .compression_method(zip::CompressionMethod::Deflated)
            .unix_permissions(0o755)
            .last_modified_time(zip::DateTime::from_date_and_time(2023, 6, 19, 0, 0, 0).unwrap());
        {
            let mut zip = zip::ZipWriter::new(&mut writer);

            for sub_entry in walkdir::WalkDir::new(parent_pkg_path) {
                let sub_entry = sub_entry?;
                let path = sub_entry.path();
                let name = path.strip_prefix(Path::new(parent_pkg_path))?;

                if path.is_file() {
                    zip.start_file(name.to_string_lossy(), options)?;
                    let mut file = File::open(path)?;
                    let mut buffer = Vec::new();
                    file.read_to_end(&mut buffer)?;
                    zip.write_all(&buffer)?;
                } else if !name.as_os_str().is_empty() {
                    zip.add_directory(name.to_string_lossy(), options)?;
                }
            }
            zip.finish()?;
        }

        let zip_contents = writer.into_inner();
        let zip_filename = format!("{}.zip", entry_path.file_name().unwrap().to_str().unwrap());
        Ok((entry_path.display().to_string(), zip_filename, zip_contents))
    })
}
const CANONICAL_PACKAGES_ZIP_PATH: &str = "../target/packages.zip";

fn main() -> anyhow::Result<()> {
    if std::env::var("SKIP_BUILD_SCRIPT").is_ok() {
        p!("skipping build script");
        return Ok(());
    }

    let pwd = std::env::current_dir()?;
    let parent_dir = pwd.parent().unwrap();
    let packages_dir = pwd.join("packages");

    if std::env::var("SKIP_BUILD_FRONTEND").is_ok() {
        p!("skipping frontend builds");
    } else {
        // build core frontends
        let core_frontends = vec![
            "src/register-ui",
            "packages/app_store/ui",
            "packages/homepage/ui",
            // chess when brought in
        ];

        // for each frontend, execute build.sh
        for frontend in core_frontends {
            let status = std::process::Command::new("sh")
                .current_dir(pwd.join(frontend))
                .arg("./build.sh")
                .status()?;
            if !status.success() {
                return Err(anyhow::anyhow!("Failed to build frontend: {}", frontend));
    let path_to_packages_zip = match std::env::var("PATH_TO_PACKAGES_ZIP") {
        Err(_) => {
            let build_package_script_path = PathBuf::from("../scripts/build_package");
            let mut child = std::process::Command::new("cargo")
                .arg("run")
                .current_dir(&build_package_script_path)
                .spawn()?;
            let result = child.wait()?;
            if !result.success() {
                return Err(anyhow::anyhow!("Failed to build packages."));
            }
            CANONICAL_PACKAGES_ZIP_PATH.to_string()
        }
        Ok(env_var) => env_var,
    };
    let path = PathBuf::from(&path_to_packages_zip);
    if !path.exists() {
        return Err(anyhow::anyhow!(
            "Path to packages {path_to_packages_zip} does not exist."
        ));
    }

    output_reruns(&packages_dir);

    let features = get_features();

    let results: Vec<anyhow::Result<(String, String, Vec<u8>)>> = fs::read_dir(&packages_dir)?
        .filter_map(|entry| {
            let entry_path = match entry {
                Ok(e) => e.path(),
                Err(_) => return None,
            };
            let child_pkg_path = entry_path.join("pkg");
            if !child_pkg_path.exists() {
                // don't run on, e.g., `.DS_Store`
                return None;
            }
            Some(build_and_zip_package(
                entry_path.clone(),
                child_pkg_path.to_str().unwrap(),
                &features,
            ))
        })
        .collect();

    // Process results, e.g., write to `bootstrapped_processes.rs`
    // This part remains sequential
    let mut bootstrapped_processes = vec![];
    writeln!(
        bootstrapped_processes,
        "pub static BOOTSTRAPPED_PROCESSES: &[(&str, &[u8], &[u8])] = &["
    )?;

    for result in results {
        match result {
            Ok((entry_path, zip_filename, zip_contents)) => {
                // Further processing, like saving ZIP files and updating bootstrapped_processes
                let metadata_path = format!("{}/metadata.json", entry_path);
                let zip_path = format!("{}/target/{}", parent_dir.display(), zip_filename);
                fs::write(&zip_path, &zip_contents)?;

                writeln!(
                    bootstrapped_processes,
                    " (\"{}\", include_bytes!(\"{}\"), include_bytes!(\"{}\")),",
                    zip_filename, metadata_path, zip_path,
                )?;
            }
            Err(e) => return Err(e),
        }
    let path_to_packages_zip_path = PathBuf::from(&path_to_packages_zip).canonicalize()?;
    let canonical_packages_zip_path = PathBuf::from(CANONICAL_PACKAGES_ZIP_PATH).canonicalize()?;
    if path_to_packages_zip_path != canonical_packages_zip_path {
        std::fs::copy(path_to_packages_zip_path, CANONICAL_PACKAGES_ZIP_PATH)?;
    }

    writeln!(bootstrapped_processes, "];")?;
    let target_dir = pwd.join("../target");
    if !target_dir.exists() {
        fs::create_dir_all(&target_dir)?;
    }
    let bootstrapped_processes_path = target_dir.join("bootstrapped_processes.rs");
    fs::write(&bootstrapped_processes_path, bootstrapped_processes)?;

    let version = if let Ok(version) = std::env::var("DOCKER_BUILD_IMAGE_VERSION") {
        // embed the DOCKER_BUILD_IMAGE_VERSION
        version
@@ -1,5 +1,3 @@
#![feature(async_closure)]
#![feature(btree_extract_if)]
use anyhow::Result;
use clap::{arg, value_parser, Command};
use lib::types::core::{
@@ -14,7 +14,8 @@ use std::{
};
use tokio::{fs, io::AsyncWriteExt, sync::Mutex};

include!("../../target/bootstrapped_processes.rs");
static PACKAGES_ZIP: &[u8] = include_bytes!("../../target/packages.zip");
const FILE_TO_METADATA: &str = "file_to_metadata.json";

pub async fn load_state(
    our_name: String,
@@ -381,7 +382,7 @@ async fn bootstrap(
        current.capabilities.extend(runtime_caps.clone());
    }

    let packages = get_zipped_packages().await;
    let packages = get_zipped_packages();

    for (package_metadata, mut package) in packages.clone() {
        let package_name = package_metadata.properties.package_name.as_str();
@@ -412,7 +413,7 @@ async fn bootstrap(
        let mut zip_file =
            fs::File::create(format!("{}/{}.zip", &pkg_path, &our_drive_name)).await?;
        let package_zip_bytes = package.clone().into_inner().into_inner();
        zip_file.write_all(package_zip_bytes).await?;
        zip_file.write_all(&package_zip_bytes).await?;

        // for each file in package.zip, write to vfs folder
        for i in 0..package.len() {
@@ -713,20 +714,31 @@ fn sign_cap(cap: Capability, keypair: Arc<signature::Ed25519KeyPair>) -> Vec<u8>
}

/// read in `include!()`ed .zip package files
async fn get_zipped_packages() -> Vec<(
fn get_zipped_packages() -> Vec<(
    Erc721Metadata,
    zip::ZipArchive<std::io::Cursor<&'static [u8]>>,
    zip::ZipArchive<std::io::Cursor<Vec<u8>>>,
)> {
    let mut packages = Vec::new();

    for (package_name, metadata_bytes, bytes) in BOOTSTRAPPED_PROCESSES.iter() {
        if let Ok(zip) = zip::ZipArchive::new(std::io::Cursor::new(*bytes)) {
            if let Ok(metadata) = serde_json::from_slice::<Erc721Metadata>(metadata_bytes) {
                packages.push((metadata, zip));
            } else {
                println!("fs: metadata for package {package_name} is not valid Erc721Metadata!\r",);
            }
        }
    let mut packages_zip = zip::ZipArchive::new(std::io::Cursor::new(PACKAGES_ZIP)).unwrap();
    let mut file_to_metadata = vec![];
    packages_zip
        .by_name(FILE_TO_METADATA)
        .unwrap()
        .read_to_end(&mut file_to_metadata)
        .unwrap();
    let file_to_metadata: HashMap<String, Erc721Metadata> =
        serde_json::from_slice(&file_to_metadata).unwrap();

    for (file_name, metadata) in file_to_metadata {
        let mut zip_bytes = vec![];
        packages_zip
            .by_name(&file_name)
            .unwrap()
            .read_to_end(&mut zip_bytes)
            .unwrap();
        let zip_archive = zip::ZipArchive::new(std::io::Cursor::new(zip_bytes)).unwrap();
        packages.push((metadata, zip_archive));
    }

    packages
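The packages.zip read here is a two-level archive produced by scripts/build_package (see its hunks below): a file_to_metadata.json entry mapping each per-package zip filename to its Erc721Metadata, plus the per-package zips themselves. A minimal, purely illustrative sketch of that layout in use, pulling one package out by filename (the get_one_package helper is hypothetical, not part of the commit):

use std::io::{Cursor, Read};

// Hypothetical helper, for illustration only: extract a single per-package
// zip from the embedded PACKAGES_ZIP archive by its filename.
fn get_one_package(zip_filename: &str) -> Option<zip::ZipArchive<Cursor<Vec<u8>>>> {
    let mut packages_zip = zip::ZipArchive::new(Cursor::new(PACKAGES_ZIP)).ok()?;
    let mut zip_bytes = vec![];
    packages_zip
        .by_name(zip_filename)
        .ok()?
        .read_to_end(&mut zip_bytes)
        .ok()?;
    zip::ZipArchive::new(Cursor::new(zip_bytes)).ok()
}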
@@ -1102,10 +1102,12 @@ impl Identity {
        match &self.routing {
            NodeRouting::Routers(_) => None,
            NodeRouting::Direct { ip, ports } | NodeRouting::Both { ip, ports, .. } => {
                if let Some(port) = ports.get("ws")
                    && *port != 0
                {
                    Some((ip, port))
                if let Some(port) = ports.get("ws") {
                    if *port != 0 {
                        Some((ip, port))
                    } else {
                        None
                    }
                } else {
                    None
                }
@@ -1116,10 +1118,12 @@ impl Identity {
        match &self.routing {
            NodeRouting::Routers(_) => None,
            NodeRouting::Direct { ip, ports } | NodeRouting::Both { ip, ports, .. } => {
                if let Some(port) = ports.get("tcp")
                    && *port != 0
                {
                    Some((ip, port))
                if let Some(port) = ports.get("tcp") {
                    if *port != 0 {
                        Some((ip, port))
                    } else {
                        None
                    }
                } else {
                    None
                }
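These two hunks unroll the let-chains into nested ifs because the nightly let_chains feature is dropped (see the lib.rs hunk below). A stable-Rust alternative that keeps the lookup to one expression, shown only as a sketch of the design space and not what the commit uses, is Option::filter:

use std::collections::HashMap;

// Sketch only; types assumed from context (ports maps protocol name to port).
fn port_if_set<'a>(ports: &'a HashMap<String, u16>, protocol: &str) -> Option<&'a u16> {
    ports.get(protocol).filter(|port| **port != 0)
}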
@@ -1,4 +1,4 @@
#![feature(let_chains)]
//#![feature(let_chains)]

pub mod core;
pub mod eth;
@@ -8,6 +8,7 @@ anyhow = "1.0.71"
clap = "4"
fs-err = "2.11"
kit = { git = "https://github.com/kinode-dao/kit", tag = "v0.7.6" }
serde_json = "1"
tokio = "1.28"
walkdir = "2.4"
zip = "0.6"
@@ -86,7 +86,6 @@ fn main() -> anyhow::Result<()> {
    )
    .get_matches();

    println!("a");
    // kinode/target/debug/build_package
    let current_exe_dir = std::env::current_exe() // build_package
        .unwrap();
@@ -100,9 +99,6 @@ fn main() -> anyhow::Result<()> {
    let kinode_dir = top_level_dir.join("kinode");
    let packages_dir = kinode_dir.join("packages");

    println!("{current_exe_dir:?} {top_level_dir:?} {kinode_dir:?} {packages_dir:?}");

    println!("b");
    if matches.get_flag("SKIP_FRONTEND") {
        println!("skipping frontend builds");
    } else {
@@ -130,7 +126,6 @@ fn main() -> anyhow::Result<()> {
        }
    }

    println!("c");
    let features = matches
        .get_many::<String>("FEATURES")
        .unwrap_or_default()
@@ -138,7 +133,6 @@ fn main() -> anyhow::Result<()> {
        .collect::<Vec<String>>()
        .join(",");

    println!("d");
    let results: Vec<anyhow::Result<(PathBuf, String, Vec<u8>)>> = fs::read_dir(&packages_dir)?
        .filter_map(|entry| {
            let entry_path = match entry {
@@ -158,62 +152,38 @@ fn main() -> anyhow::Result<()> {
        })
        .collect();

    println!("e");
    // Process results, e.g., write to `bootstrapped_processes.rs`
    // This part remains sequential
    let mut bootstrapped_processes = vec![];
    writeln!(
        bootstrapped_processes,
        "pub static BOOTSTRAPPED_PROCESSES: &[(&str, &[u8], &[u8])] = &["
    )?;
    let mut file_to_metadata = std::collections::HashMap::new();

    println!("f");
    let target_dir = top_level_dir.join("target");
    let target_packages_dir = target_dir.join("packages");
    let target_metadatas_dir = target_dir.join("metadatas");
    for path in [&target_packages_dir, &target_metadatas_dir] {
        if !path.exists() {
            fs::create_dir_all(path)?;
        }
    // fresh
    if target_packages_dir.exists() {
        fs::remove_dir_all(&target_packages_dir)?;
    }
    fs::create_dir_all(&target_packages_dir)?;

println!("g");
|
||||
for result in results {
|
||||
match result {
|
||||
Ok((entry_path, zip_filename, zip_contents)) => {
|
||||
let metadata_path = entry_path.join("metadata.json");
|
||||
let metadata_file_name = {
|
||||
let metadata_file_stem =
|
||||
entry_path.file_stem().and_then(|s| s.to_str()).unwrap();
|
||||
format!("{metadata_file_stem}.json")
|
||||
};
|
||||
let new_metadata_path = target_metadatas_dir.join(metadata_file_name);
|
||||
fs::copy(&metadata_path, &new_metadata_path)?;
|
||||
let metadata_contents = fs::read_to_string(&metadata_path)?;
|
||||
let metadata_contents: serde_json::Value = serde_json::from_str(&metadata_contents)?;
|
||||
file_to_metadata.insert(zip_filename.clone(), metadata_contents);
|
||||
let zip_path = target_packages_dir.join(&zip_filename);
|
||||
fs::write(&zip_path, &zip_contents)?;
|
||||
|
||||
writeln!(
|
||||
bootstrapped_processes,
|
||||
" (\"{}\", include_bytes!(\"{}\"), include_bytes!(\"{}\"),),",
|
||||
zip_filename,
|
||||
new_metadata_path.display(),
|
||||
zip_path.display(),
|
||||
)?;
|
||||
}
|
||||
Err(e) => return Err(e),
|
||||
Err(e) => return Err(anyhow::anyhow!("{e:?}")),
|
||||
}
|
||||
}
|
||||
|
||||
println!("h");
|
||||
writeln!(bootstrapped_processes, "];")?;
|
||||
let bootstrapped_processes_path = target_packages_dir.join("bootstrapped_processes.rs");
|
||||
fs::write(&bootstrapped_processes_path, bootstrapped_processes)?;
|
||||
let file_to_metadata = serde_json::to_value(&file_to_metadata)?;
|
||||
let file_to_metadata = serde_json::to_string_pretty(&file_to_metadata)?;
|
||||
let file_to_metadata_path = target_packages_dir.join("file_to_metadata.json");
|
||||
fs::write(&file_to_metadata_path, file_to_metadata)?;
|
||||
|
||||
println!("i");
|
||||
let package_zip_path = target_dir.join("packages.zip");
|
||||
let package_zip_contents = zip_directory(&target_packages_dir)?;
|
||||
fs::write(package_zip_path, package_zip_contents)?;
|
||||
|
||||
println!("j");
|
||||
Ok(())
|
||||
}
|
||||
|
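zip_directory, called near the end of main above, is not shown in this diff. A minimal sketch of what such a helper could look like, reusing the walkdir + zip 0.6 pattern from the old build.rs (an assumption about its shape, not the commit's actual implementation):

use std::fs::File;
use std::io::{Cursor, Read, Write};
use std::path::Path;
use zip::write::FileOptions;

// Hypothetical implementation of the zip_directory helper referenced above:
// walk the directory and write every file/subdirectory into an in-memory zip.
fn zip_directory(dir: &Path) -> anyhow::Result<Vec<u8>> {
    let mut writer = Cursor::new(Vec::new());
    {
        let options = FileOptions::default()
            .compression_method(zip::CompressionMethod::Deflated)
            .unix_permissions(0o755);
        let mut zip = zip::ZipWriter::new(&mut writer);
        for entry in walkdir::WalkDir::new(dir) {
            let entry = entry?;
            let path = entry.path();
            let name = path.strip_prefix(dir)?;
            if path.is_file() {
                zip.start_file(name.to_string_lossy(), options)?;
                let mut buffer = Vec::new();
                File::open(path)?.read_to_end(&mut buffer)?;
                zip.write_all(&buffer)?;
            } else if !name.as_os_str().is_empty() {
                zip.add_directory(name.to_string_lossy(), options)?;
            }
        }
        zip.finish()?;
    }
    Ok(writer.into_inner())
}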