Merge branch 'develop' into dr/contacts

dr-frmr 2024-10-14 14:08:16 -04:00
commit 4521d82093
No known key found for this signature in database
24 changed files with 633 additions and 439 deletions

Cargo.lock generated

@@ -1402,6 +1402,20 @@ dependencies = [
"zeroize",
]
[[package]]
name = "build_packages"
version = "0.1.0"
dependencies = [
"anyhow",
"clap",
"fs-err",
"kit 0.7.7",
"serde_json",
"tokio",
"walkdir",
"zip 0.6.6",
]
[[package]]
name = "bumpalo"
version = "3.16.0"
@@ -2638,18 +2652,6 @@ version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d"
[[package]]
name = "filetime"
version = "0.2.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf401df4a4e3872c4fe8151134cf483738e74b67fc934d6532c882b3d24a4550"
dependencies = [
"cfg-if",
"libc",
"libredox",
"windows-sys 0.59.0",
]
[[package]]
name = "fixed-hash"
version = "0.8.0"
@@ -3649,7 +3651,7 @@ dependencies = [
[[package]]
name = "kinode"
version = "0.9.5"
version = "0.9.7"
dependencies = [
"aes-gcm",
"alloy 0.2.1",
@@ -3664,7 +3666,6 @@ dependencies = [
"clap",
"crossterm",
"dashmap 5.5.3",
"flate2",
"futures",
"generic-array",
"hex",
@@ -3672,7 +3673,6 @@ dependencies = [
"http 1.1.0",
"indexmap",
"jwt",
"kit",
"lazy_static",
"lib",
"libc",
@@ -3693,25 +3693,22 @@ dependencies = [
"snow",
"socket2 0.5.7",
"static_dir",
"tar",
"thiserror",
"tokio",
"tokio-tungstenite 0.21.0",
"unicode-segmentation",
"unicode-width",
"url",
"walkdir",
"warp",
"wasi-common",
"wasmtime",
"wasmtime-wasi",
"zip 0.6.6",
"zip 1.1.4",
]
[[package]]
name = "kinode_lib"
version = "0.9.5"
version = "0.9.7"
dependencies = [
"lib",
]
@@ -3798,6 +3795,43 @@ dependencies = [
"zip 0.6.6",
]
[[package]]
name = "kit"
version = "0.7.7"
source = "git+https://github.com/kinode-dao/kit?rev=9c94b4b#9c94b4bd3f2a9dc2eabb2da9bc2ef5e6eb07af9d"
dependencies = [
"alloy 0.1.4",
"alloy-sol-macro",
"alloy-sol-types",
"anyhow",
"base64 0.21.7",
"cargo_metadata",
"clap",
"color-eyre",
"dirs 5.0.1",
"fs-err",
"git2",
"hex",
"kinode_process_lib 0.9.2",
"nix 0.27.1",
"regex",
"reqwest",
"rpassword",
"semver 1.0.23",
"serde",
"serde_json",
"sha2",
"tokio",
"toml",
"tracing",
"tracing-appender",
"tracing-error",
"tracing-subscriber",
"walkdir",
"wit-bindgen",
"zip 0.6.6",
]
[[package]]
name = "kns_indexer"
version = "0.2.0"
@@ -3834,10 +3868,10 @@ checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67"
[[package]]
name = "lib"
version = "0.9.5"
version = "0.9.7"
dependencies = [
"alloy 0.2.1",
"kit",
"kit 0.7.6",
"lazy_static",
"rand 0.8.5",
"ring",
@ -3893,7 +3927,6 @@ checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
dependencies = [
"bitflags 2.6.0",
"libc",
"redox_syscall",
]
[[package]]
@@ -5951,17 +5984,6 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
[[package]]
name = "tar"
version = "0.4.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb797dad5fb5b76fcf519e702f4a589483b5ef06567f160c392832c1f5e44909"
dependencies = [
"filetime",
"libc",
"xattr",
]
[[package]]
name = "target-lexicon"
version = "0.12.16"
@@ -7725,17 +7747,6 @@ dependencies = [
"tap",
]
[[package]]
name = "xattr"
version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8da84f1a25939b27f6820d92aed108f83ff920fdf11a7b19366c27c4cda81d4f"
dependencies = [
"libc",
"linux-raw-sys",
"rustix",
]
[[package]]
name = "zerocopy"
version = "0.7.35"

Cargo.toml

@@ -1,7 +1,7 @@
[package]
name = "kinode_lib"
authors = ["KinodeDAO"]
version = "0.9.5"
version = "0.9.7"
edition = "2021"
description = "A general-purpose sovereign cloud computing platform"
homepage = "https://kinode.org"
@@ -27,7 +27,7 @@ members = [
"kinode/packages/terminal/help", "kinode/packages/terminal/hi", "kinode/packages/terminal/kfetch",
"kinode/packages/terminal/kill", "kinode/packages/terminal/m", "kinode/packages/terminal/top",
"kinode/packages/terminal/net_diagnostics", "kinode/packages/terminal/peer", "kinode/packages/terminal/peers",
"kinode/packages/tester/tester",
"kinode/packages/tester/tester", "scripts/build_packages",
]
default-members = ["lib"]
resolver = "2"

Dockerfile

@@ -10,5 +10,4 @@ ENV NVM_DIR=/root/.nvm \
WORKDIR /input
# Set the default command to run the build script
# TODO: once build is idempotent, remove the `rm -rf` line
CMD ["/bin/bash", "-c", ". ~/.bashrc && . ~/.cargo/env && . $NVM_DIR/nvm.sh && rm -rf target/ kinode/packages/*/pkg/*wasm kinode/packages/*/*/target/ kinode/packages/*/pkg/api.zip kinode/packages/*/*/wit kinode/packages/app_store/pkg/ui kinode/packages/homepage/pkg/ui kinode/src/register-ui/build && ./scripts/build-release.py && cp -r /tmp/kinode-release/* /output && chmod 664 /output/* && rm -rf target/ kinode/packages/*/pkg/*wasm kinode/packages/*/*/target/ kinode/packages/*/pkg/api.zip kinode/packages/*/*/wit kinode/packages/app_store/pkg/ui kinode/packages/homepage/pkg/ui kinode/src/register-ui/build"]
CMD ["/bin/bash", "-c", ". ~/.bashrc && . ~/.cargo/env && . $NVM_DIR/nvm.sh && ./scripts/build-release.py && cp -r /tmp/kinode-release/* /output && chmod 664 /output/* && find . -user root -print0 2>/dev/null | xargs -0 rm -rf"]

README.md

@@ -28,26 +28,26 @@ On certain operating systems, you may need to install these dependencies if they
git clone git@github.com:kinode-dao/kinode.git
# Get some stuff so we can build Wasm.
# Install Rust and some `cargo` tools so we can build the runtime and Wasm.
cd kinode
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
cargo install wasm-tools
rustup install nightly
rustup target add wasm32-wasi
rustup target add wasm32-wasi --toolchain nightly
rustup target add wasm32-wasip1
rustup target add wasm32-wasip1 --toolchain nightly
cargo install cargo-wasi
# Install NPM so we can build frontends for "distro" packages.
# https://docs.npmjs.com/downloading-and-installing-node-js-and-npm
# If you want to skip this step, run cargo build with the environment variable SKIP_BUILD_FRONTEND=true
# If you want to skip this step, build the packages with `cargo run -p build_packages -- --skip-build-frontend`, which skips building the frontends
# Build the runtime, along with a number of "distro" Wasm modules.
# The compiled binary will be at `kinode/target/debug/kinode`
# OPTIONAL: --release flag (slower build; faster runtime; binary at `kinode/target/release/kinode`)
# Build the "distro" Wasm modules, then, build the runtime.
# The compiled packages will be at `kinode/target/packages.zip`.
# The compiled binary will be at `kinode/target/debug/kinode`.
# OPTIONAL: --release flag (slower build; faster runtime; binary at `kinode/target/release/kinode`).
cargo +nightly build -p kinode
cd kinode
cargo run -p build_packages
cargo build -p kinode
```
## Security Status

kinode/Cargo.toml

@@ -1,7 +1,7 @@
[package]
name = "kinode"
authors = ["KinodeDAO"]
version = "0.9.5"
version = "0.9.7"
edition = "2021"
description = "A general-purpose sovereign cloud computing platform"
homepage = "https://kinode.org"
@@ -14,12 +14,7 @@ path = "src/main.rs"
[build-dependencies]
anyhow = "1.0.71"
flate2 = "1.0"
kit = { git = "https://github.com/kinode-dao/kit", tag = "v0.7.6" }
tar = "0.4"
tokio = "1.28"
walkdir = "2.4"
zip = "0.6"
sha2 = "0.10.8"
[features]
simulation-mode = []

kinode/build.rs

@@ -1,12 +1,9 @@
use std::{
fs::{self, File},
io::{BufReader, Cursor, Read, Write},
path::{Path, PathBuf},
};
use std::io::Read;
use std::path::{Path, PathBuf};
use flate2::read::GzDecoder;
use tar::Archive;
use zip::write::FileOptions;
use sha2::Digest;
const CANONICAL_PACKAGES_ZIP_PATH: &str = "../target/packages.zip";
macro_rules! p {
($($tokens: tt)*) => {
@@ -14,144 +11,52 @@ macro_rules! p {
}
}
/// get cargo features to compile packages with
fn get_features() -> String {
let mut features = "".to_string();
for (key, _) in std::env::vars() {
if key.starts_with("CARGO_FEATURE_") {
let feature = key
.trim_start_matches("CARGO_FEATURE_")
.to_lowercase()
.replace("_", "-");
features.push_str(&feature);
fn compute_hash(file_path: &Path) -> anyhow::Result<String> {
let input_file = std::fs::File::open(file_path)?;
let mut reader = std::io::BufReader::new(input_file);
let mut hasher = sha2::Sha256::new();
let mut buffer = [0; 1024]; // buffer for chunks of the file
loop {
let count = reader.read(&mut buffer)?;
if count == 0 {
break;
}
}
features
hasher.update(&buffer[..count]);
}
/// print `cargo:rerun-if-changed=PATH` for each path of interest
fn output_reruns(dir: &Path) {
// Check files individually
if let Ok(entries) = fs::read_dir(dir) {
for entry in entries.filter_map(|e| e.ok()) {
let path = entry.path();
if path.is_dir() {
if let Some(dirname) = path.file_name().and_then(|n| n.to_str()) {
if dirname == "ui" || dirname == "target" {
// do not prompt a rerun if only UI/build files have changed
continue;
}
// If the entry is a directory not in rerun_files, recursively walk it
output_reruns(&path);
}
} else {
if let Some(filename) = path.file_name().and_then(|n| n.to_str()) {
if filename.ends_with(".zip") || filename.ends_with(".wasm") {
// do not prompt a rerun for compiled outputs
continue;
}
// any other changed file within a package subdir prompts a rerun
println!("cargo::rerun-if-changed={}", path.display());
}
}
}
}
}
fn _untar_gz_file(path: &Path, dest: &Path) -> std::io::Result<()> {
// Open the .tar.gz file
let tar_gz = File::open(path)?;
let tar_gz_reader = BufReader::new(tar_gz);
// Decode the gzip layer
let tar = GzDecoder::new(tar_gz_reader);
// Create a new archive from the tar file
let mut archive = Archive::new(tar);
// Unpack the archive into the specified destination directory
archive.unpack(dest)?;
Ok(())
}
fn build_and_zip_package(
entry_path: PathBuf,
parent_pkg_path: &str,
features: &str,
) -> anyhow::Result<(String, String, Vec<u8>)> {
let rt = tokio::runtime::Runtime::new().unwrap();
rt.block_on(async {
kit::build::execute(
&entry_path,
true,
false,
true,
features,
None,
None,
None,
vec![],
vec![],
false,
false,
false,
)
.await
.map_err(|e| anyhow::anyhow!("{:?}", e))?;
let mut writer = Cursor::new(Vec::new());
let options = FileOptions::default()
.compression_method(zip::CompressionMethod::Deflated)
.unix_permissions(0o755)
.last_modified_time(zip::DateTime::from_date_and_time(2023, 6, 19, 0, 0, 0).unwrap());
{
let mut zip = zip::ZipWriter::new(&mut writer);
for sub_entry in walkdir::WalkDir::new(parent_pkg_path) {
let sub_entry = sub_entry?;
let path = sub_entry.path();
let name = path.strip_prefix(Path::new(parent_pkg_path))?;
if path.is_file() {
zip.start_file(name.to_string_lossy(), options)?;
let mut file = File::open(path)?;
let mut buffer = Vec::new();
file.read_to_end(&mut buffer)?;
zip.write_all(&buffer)?;
} else if !name.as_os_str().is_empty() {
zip.add_directory(name.to_string_lossy(), options)?;
}
}
zip.finish()?;
}
let zip_contents = writer.into_inner();
let zip_filename = format!("{}.zip", entry_path.file_name().unwrap().to_str().unwrap());
Ok((entry_path.display().to_string(), zip_filename, zip_contents))
})
Ok(format!("{:x}", hasher.finalize()))
}
fn main() -> anyhow::Result<()> {
if std::env::var("SKIP_BUILD_SCRIPT").is_ok() {
p!("skipping build script");
return Ok(());
let path_to_packages_zip = match std::env::var("PATH_TO_PACKAGES_ZIP") {
Ok(env_var) => env_var,
Err(_) => {
let canonical_path = PathBuf::from(CANONICAL_PACKAGES_ZIP_PATH);
if canonical_path.exists() {
p!("No path given via PATH_TO_PACKAGES_ZIP envvar. Defaulting to path of `kinode/target/packages.zip`.");
CANONICAL_PACKAGES_ZIP_PATH.to_string()
} else {
return Err(anyhow::anyhow!("You must build packages.zip with scripts/build_packages or set PATH_TO_PACKAGES_ZIP to point to your desired pacakges.zip (default path at kinode/target/packages.zip was not populated)."));
}
}
};
let path = PathBuf::from(&path_to_packages_zip);
if !path.exists() {
return Err(anyhow::anyhow!(
"Path to packages {path_to_packages_zip} does not exist."
));
}
let pwd = std::env::current_dir()?;
let parent_dir = pwd.parent().unwrap();
let packages_dir = pwd.join("packages");
let path_to_packages_zip_path = PathBuf::from(&path_to_packages_zip).canonicalize()?;
let canonical_packages_zip_path = PathBuf::from(CANONICAL_PACKAGES_ZIP_PATH).canonicalize()?;
if path_to_packages_zip_path != canonical_packages_zip_path {
std::fs::copy(&path_to_packages_zip_path, &canonical_packages_zip_path)?;
}
if std::env::var("SKIP_BUILD_FRONTEND").is_ok() {
p!("skipping frontend builds");
} else {
// build core frontends
let core_frontends = vec![
"src/register-ui",
"packages/app_store/ui",
"packages/homepage/ui",
// chess when brought in
];
let pwd = std::env::current_dir()?;
let core_frontends = vec!["src/register-ui"];
// for each frontend, execute build.sh
for frontend in core_frontends {
@@ -163,64 +68,6 @@ fn main() -> anyhow::Result<()> {
return Err(anyhow::anyhow!("Failed to build frontend: {}", frontend));
}
}
}
output_reruns(&packages_dir);
let features = get_features();
let results: Vec<anyhow::Result<(String, String, Vec<u8>)>> = fs::read_dir(&packages_dir)?
.filter_map(|entry| {
let entry_path = match entry {
Ok(e) => e.path(),
Err(_) => return None,
};
let child_pkg_path = entry_path.join("pkg");
if !child_pkg_path.exists() {
// don't run on, e.g., `.DS_Store`
return None;
}
Some(build_and_zip_package(
entry_path.clone(),
child_pkg_path.to_str().unwrap(),
&features,
))
})
.collect();
// Process results, e.g., write to `bootstrapped_processes.rs`
// This part remains sequential
let mut bootstrapped_processes = vec![];
writeln!(
bootstrapped_processes,
"pub static BOOTSTRAPPED_PROCESSES: &[(&str, &[u8], &[u8])] = &["
)?;
for result in results {
match result {
Ok((entry_path, zip_filename, zip_contents)) => {
// Further processing, like saving ZIP files and updating bootstrapped_processes
let metadata_path = format!("{}/metadata.json", entry_path);
let zip_path = format!("{}/target/{}", parent_dir.display(), zip_filename);
fs::write(&zip_path, &zip_contents)?;
writeln!(
bootstrapped_processes,
" (\"{}\", include_bytes!(\"{}\"), include_bytes!(\"{}\")),",
zip_filename, metadata_path, zip_path,
)?;
}
Err(e) => return Err(e),
}
}
writeln!(bootstrapped_processes, "];")?;
let target_dir = pwd.join("../target");
if !target_dir.exists() {
fs::create_dir_all(&target_dir)?;
}
let bootstrapped_processes_path = target_dir.join("bootstrapped_processes.rs");
fs::write(&bootstrapped_processes_path, bootstrapped_processes)?;
let version = if let Ok(version) = std::env::var("DOCKER_BUILD_IMAGE_VERSION") {
// embed the DOCKER_BUILD_IMAGE_VERSION
@@ -230,5 +77,8 @@ fn main() -> anyhow::Result<()> {
};
println!("cargo:rustc-env=DOCKER_BUILD_IMAGE_VERSION={version}");
let packages_zip_hash = compute_hash(&canonical_packages_zip_path)?;
println!("cargo:rustc-env=PACKAGES_ZIP_HASH={packages_zip_hash}");
Ok(())
}
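With packages.zip now built out-of-band by `scripts/build_packages`, build.rs only verifies the archive exists, copies it to the canonical path, and embeds its SHA-256 via `cargo:rustc-env`. As a minimal sketch, that embedded hash could be re-checked against a packages.zip on disk at runtime; the commit itself only prints the hash on boot, so this helper is hypothetical:

```rust
// Hypothetical helper, not part of this commit: recompute the streaming
// SHA-256 the same way compute_hash does, then compare it to the value
// build.rs embedded via `cargo:rustc-env=PACKAGES_ZIP_HASH=...`.
use sha2::{Digest, Sha256};
use std::io::Read;

fn packages_zip_matches_build(path: &std::path::Path) -> std::io::Result<bool> {
    let mut reader = std::io::BufReader::new(std::fs::File::open(path)?);
    let mut hasher = Sha256::new();
    let mut buffer = [0u8; 1024];
    loop {
        let count = reader.read(&mut buffer)?;
        if count == 0 {
            break;
        }
        hasher.update(&buffer[..count]);
    }
    // env!() resolves at compile time, so this only works inside the
    // kinode crate, where build.rs sets PACKAGES_ZIP_HASH.
    Ok(format!("{:x}", hasher.finalize()) == env!("PACKAGES_ZIP_HASH"))
}
```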

kinode/packages/app_store/api (WIT)

@@ -184,6 +184,8 @@ interface downloads {
auto-update(auto-update-request),
/// Notify that a download is complete
download-complete(download-complete-request),
/// Auto-update-download complete
auto-download-complete(auto-download-complete-request),
/// Get files for a package
get-files(option<package-id>),
/// Remove a file
@@ -243,6 +245,12 @@ interface downloads {
err: option<download-error>,
}
/// Request for an auto-download complete
record auto-download-complete-request {
download-info: download-complete-request,
manifest-hash: string,
}
/// Represents a hash mismatch error
record hash-mismatch {
desired: string,

kinode/packages/app_store/app_store (main process)

@@ -30,7 +30,7 @@
//! It delegates these responsibilities to the downloads and chain processes respectively.
//!
use crate::kinode::process::downloads::{
DownloadCompleteRequest, DownloadResponses, ProgressUpdate,
AutoDownloadCompleteRequest, DownloadCompleteRequest, DownloadResponses, ProgressUpdate,
};
use crate::kinode::process::main::{
ApisResponse, GetApiResponse, InstallPackageRequest, InstallResponse, LocalRequest,
@@ -65,6 +65,7 @@ pub enum Req {
LocalRequest(LocalRequest),
Progress(ProgressUpdate),
DownloadComplete(DownloadCompleteRequest),
AutoDownloadComplete(AutoDownloadCompleteRequest),
Http(http::server::HttpServerRequest),
}
@@ -161,6 +162,40 @@ fn handle_message(
},
);
}
Req::AutoDownloadComplete(req) => {
if !message.is_local(&our) {
return Err(anyhow::anyhow!(
"auto download complete from non-local node"
));
}
// auto_install case:
// the downloads process has given us the new package manifest's
// capability hashes, and the old package's capability hashes.
// we can use these to determine if the new package has the same
// capabilities as the old one, and if so, auto-install it.
let manifest_hash = req.manifest_hash;
let package_id = req.download_info.package_id;
let version_hash = req.download_info.version_hash;
if let Some(package) = state.packages.get(&package_id.clone().to_process_lib()) {
if package.manifest_hash == Some(manifest_hash) {
print_to_terminal(1, "auto_install:main, manifest_hash match");
if let Err(e) =
utils::install(&package_id, None, &version_hash, state, &our.node)
{
print_to_terminal(1, &format!("error auto_installing package: {e}"));
} else {
println!(
"auto_installed update for package: {:?}",
&package_id.to_process_lib()
);
}
} else {
print_to_terminal(1, "auto_install:main, manifest_hash do not match");
}
}
}
Req::DownloadComplete(req) => {
if !message.is_local(&our) {
return Err(anyhow::anyhow!("download complete from non-local node"));
@@ -182,41 +217,6 @@ fn handle_message(
.unwrap(),
},
);
// auto_install case:
// the downloads process has given us the new package manifest's
// capability hashes, and the old package's capability hashes.
// we can use these to determine if the new package has the same
// capabilities as the old one, and if so, auto-install it.
if let Some(context) = message.context() {
let manifest_hash = String::from_utf8(context.to_vec())?;
if let Some(package) =
state.packages.get(&req.package_id.clone().to_process_lib())
{
if package.manifest_hash == Some(manifest_hash) {
print_to_terminal(1, "auto_install:main, manifest_hash match");
if let Err(e) = utils::install(
&req.package_id,
None,
&req.version_hash,
state,
&our.node,
) {
print_to_terminal(
1,
&format!("error auto_installing package: {e}"),
);
} else {
println!(
"auto_installed update for package: {:?}",
&req.package_id.to_process_lib()
);
}
} else {
print_to_terminal(1, "auto_install:main, manifest_hash do not match");
}
}
}
}
}
} else {

kinode/packages/app_store/downloads (downloads process)

@@ -42,9 +42,9 @@
//! mechanism is implemented in the FT worker for improved modularity and performance.
//!
use crate::kinode::process::downloads::{
AutoUpdateRequest, DirEntry, DownloadCompleteRequest, DownloadError, DownloadRequests,
DownloadResponses, Entry, FileEntry, HashMismatch, LocalDownloadRequest, RemoteDownloadRequest,
RemoveFileRequest,
AutoDownloadCompleteRequest, AutoUpdateRequest, DirEntry, DownloadCompleteRequest,
DownloadError, DownloadRequests, DownloadResponses, Entry, FileEntry, HashMismatch,
LocalDownloadRequest, RemoteDownloadRequest, RemoveFileRequest,
};
use std::{collections::HashSet, io::Read, str::FromStr};
@@ -245,7 +245,7 @@ fn handle_message(
// if we have a pending auto_install, forward that context to the main process.
// it will check if the caps_hashes match (no change in capabilities), and auto_install if it does.
let context = if auto_updates.remove(&(
let manifest_hash = if auto_updates.remove(&(
req.package_id.clone().to_process_lib(),
req.version_hash.clone(),
)) {
@ -253,7 +253,7 @@ fn handle_message(
req.package_id.clone().to_process_lib(),
req.version_hash.clone(),
) {
Ok(manifest_hash) => Some(manifest_hash.as_bytes().to_vec()),
Ok(manifest_hash) => Some(manifest_hash),
Err(e) => {
print_to_terminal(
1,
@@ -267,13 +267,26 @@ fn handle_message(
};
// pushed to UI via websockets
let mut request = Request::to(("our", "main", "app_store", "sys"))
.body(serde_json::to_vec(&req)?);
Request::to(("our", "main", "app_store", "sys"))
.body(serde_json::to_vec(&req)?)
.send()?;
if let Some(ctx) = context {
request = request.context(ctx);
// trigger auto-update install trigger to main:app_store:sys
if let Some(manifest_hash) = manifest_hash {
let auto_download_complete_req = AutoDownloadCompleteRequest {
download_info: req.clone(),
manifest_hash,
};
print_to_terminal(
1,
&format!(
"auto_update download complete: triggering install on main:app_store:sys"
),
);
Request::to(("our", "main", "app_store", "sys"))
.body(serde_json::to_vec(&auto_download_complete_req)?)
.send()?;
}
request.send()?;
}
DownloadRequests::GetFiles(maybe_id) => {
// if not local, throw to the boonies.

app_store UI: PackageSelector.tsx

@@ -6,7 +6,7 @@ interface PackageSelectorProps {
}
const PackageSelector: React.FC<PackageSelectorProps> = ({ onPackageSelect }) => {
const { installed } = useAppsStore();
const { installed, fetchInstalled } = useAppsStore();
const [selectedPackage, setSelectedPackage] = useState<string>("");
const [customPackage, setCustomPackage] = useState<string>("");
const [isCustomPackageSelected, setIsCustomPackageSelected] = useState(false);
@@ -18,6 +18,10 @@ const PackageSelector: React.FC<PackageSelectorProps> = ({ onPackageSelect }) =>
}
}, [selectedPackage, onPackageSelect]);
useEffect(() => {
fetchInstalled();
}, []);
const handlePackageChange = (e: React.ChangeEvent<HTMLSelectElement>) => {
const value = e.target.value;
if (value === "custom") {

app_store UI: DownloadPage.tsx

@@ -1,11 +1,10 @@
import React, { useState, useEffect, useCallback, useMemo } from "react";
import { useParams, useNavigate } from "react-router-dom";
import { useParams } from "react-router-dom";
import { FaDownload, FaSpinner, FaChevronDown, FaChevronUp, FaRocket, FaTrash, FaPlay } from "react-icons/fa";
import useAppsStore from "../store";
import { MirrorSelector } from '../components';
export default function DownloadPage() {
const navigate = useNavigate();
const { id } = useParams<{ id: string }>();
const {
listings,
@@ -28,6 +27,9 @@ export default function DownloadPage() {
const [isMirrorOnline, setIsMirrorOnline] = useState<boolean | null>(null);
const [showCapApproval, setShowCapApproval] = useState(false);
const [manifest, setManifest] = useState<any>(null);
const [isInstalling, setIsInstalling] = useState(false);
const [isCheckingLaunch, setIsCheckingLaunch] = useState(false);
const [launchPath, setLaunchPath] = useState<string | null>(null);
const app = useMemo(() => listings[id || ""], [listings, id]);
const appDownloads = useMemo(() => downloads[id || ""] || [], [downloads, id]);
@@ -101,6 +103,36 @@ export default function DownloadPage() {
return versionData ? installedApp.our_version_hash === versionData.hash : false;
}, [app, selectedVersion, installedApp, sortedVersions]);
const checkLaunchPath = useCallback(() => {
if (!app) return;
setIsCheckingLaunch(true);
const appId = `${app.package_id.package_name}:${app.package_id.publisher_node}`;
fetchHomepageApps().then(() => {
const path = getLaunchUrl(appId);
setLaunchPath(path);
setIsCheckingLaunch(false);
if (path) {
setIsInstalling(false);
}
});
}, [app, fetchHomepageApps, getLaunchUrl]);
useEffect(() => {
if (isInstalling) {
const checkInterval = setInterval(checkLaunchPath, 500);
const timeout = setTimeout(() => {
clearInterval(checkInterval);
setIsInstalling(false);
setIsCheckingLaunch(false);
}, 5000);
return () => {
clearInterval(checkInterval);
clearTimeout(timeout);
};
}
}, [isInstalling, checkLaunchPath]);
const handleDownload = useCallback(() => {
if (!id || !selectedMirror || !app || !selectedVersion) return;
const versionData = sortedVersions.find(v => v.version === selectedVersion);
@@ -130,36 +162,87 @@ export default function DownloadPage() {
}
}, [id, app, appDownloads]);
const canDownload = useMemo(() => {
return selectedMirror && (isMirrorOnline === true || selectedMirror.startsWith('http')) && !isDownloading && !isDownloaded;
}, [selectedMirror, isMirrorOnline, isDownloading, isDownloaded]);
const confirmInstall = useCallback(() => {
if (!id || !selectedVersion) return;
const versionData = sortedVersions.find(v => v.version === selectedVersion);
if (versionData) {
setIsInstalling(true);
setLaunchPath(null);
installApp(id, versionData.hash).then(() => {
fetchData(id);
setShowCapApproval(false);
setManifest(null);
fetchData(id);
});
}
}, [id, selectedVersion, sortedVersions, installApp, fetchData]);
const handleLaunch = useCallback(() => {
if (app) {
const launchUrl = getLaunchUrl(`${app.package_id.package_name}:${app.package_id.publisher_node}`);
if (launchUrl) {
window.location.href = launchUrl;
if (launchPath) {
window.location.href = launchPath;
}
}
}, [app, getLaunchUrl]);
}, [launchPath]);
const canLaunch = useMemo(() => {
if (!app) return false;
return !!getLaunchUrl(`${app.package_id.package_name}:${app.package_id.publisher_node}`);
}, [app, getLaunchUrl]);
const canDownload = useMemo(() => {
return selectedMirror && (isMirrorOnline === true || selectedMirror.startsWith('http')) && !isDownloading && !isDownloaded;
}, [selectedMirror, isMirrorOnline, isDownloading, isDownloaded]);
const renderActionButton = () => {
if (isCurrentVersionInstalled || launchPath) {
return (
<button className="action-button installed-button" disabled>
<FaRocket /> Installed
</button>
);
}
if (isInstalling || isCheckingLaunch) {
return (
<button className="action-button installing-button" disabled>
<FaSpinner className="fa-spin" /> Installing...
</button>
);
}
if (isDownloaded) {
return (
<button
onClick={() => {
const versionData = sortedVersions.find(v => v.version === selectedVersion);
if (versionData) {
handleInstall(versionData.version, versionData.hash);
}
}}
className="action-button install-button"
>
<FaRocket /> Install
</button>
);
}
return (
<button
onClick={handleDownload}
disabled={!canDownload}
className="action-button download-button"
>
{isDownloading ? (
<>
<FaSpinner className="fa-spin" /> Downloading... {downloadProgress}%
</>
) : (
<>
<FaDownload /> Download
</>
)}
</button>
);
};
if (!app) {
return <div className="downloads-page"><h4>Loading app details...</h4></div>;
}
@@ -176,15 +259,22 @@ export default function DownloadPage() {
<p className="app-id">{`${app.package_id.package_name}.${app.package_id.publisher_node}`}</p>
</div>
</div>
{installedApp && (
{launchPath ? (
<button
onClick={handleLaunch}
className="launch-button"
disabled={!canLaunch}
>
<FaPlay /> {canLaunch ? 'Launch' : 'No UI found for app'}
<FaPlay /> Launch
</button>
)}
) : isInstalling || isCheckingLaunch ? (
<button className="launch-button" disabled>
<FaSpinner className="fa-spin" /> Checking...
</button>
) : installedApp ? (
<button className="launch-button" disabled>
No UI found for app
</button>
) : null}
</div>
<p className="app-description">{app.metadata?.description}</p>
@@ -207,39 +297,7 @@ export default function DownloadPage() {
onMirrorSelect={handleMirrorSelect}
/>
{isCurrentVersionInstalled ? (
<button className="action-button installed-button" disabled>
<FaRocket /> Installed
</button>
) : isDownloaded ? (
<button
onClick={() => {
const versionData = sortedVersions.find(v => v.version === selectedVersion);
if (versionData) {
handleInstall(versionData.version, versionData.hash);
}
}}
className="action-button install-button"
>
<FaRocket /> Install
</button>
) : (
<button
onClick={handleDownload}
disabled={!canDownload}
className="action-button download-button"
>
{isDownloading ? (
<>
<FaSpinner className="fa-spin" /> Downloading... {downloadProgress}%
</>
) : (
<>
<FaDownload /> Download
</>
)}
</button>
)}
{renderActionButton()}
</div>
<div className="my-downloads">

app_store UI: PublishPage.tsx

@@ -12,7 +12,7 @@ const NAME_INVALID = "Package name must contain only valid characters (a-z, 0-9,
export default function PublishPage() {
const { openConnectModal } = useConnectModal();
const { ourApps, fetchOurApps, installed, downloads } = useAppsStore();
const { ourApps, fetchOurApps, downloads } = useAppsStore();
const publicClient = usePublicClient();
const { address, isConnected, isConnecting } = useAccount();

app_store UI: store

@@ -218,12 +218,6 @@ const useAppsStore = create<AppsStore>()((set, get) => ({
});
if (res.status === HTTP_STATUS.CREATED) {
await get().fetchInstalled();
// hacky: a small delay (500ms) before fetching homepage apps
// to give the app time to add itself to the homepage
// might make sense to add more state and do retry logic instead.
await new Promise(resolve => setTimeout(resolve, 500));
await get().fetchHomepageApps();
}
} catch (error) {

kinode/src/main.rs

@@ -1,5 +1,3 @@
#![feature(async_closure)]
#![feature(btree_extract_if)]
use anyhow::Result;
use clap::{arg, value_parser, Command};
use lib::types::core::{
@@ -66,10 +64,17 @@ pub const MULTICALL_ADDRESS: &str = "0xcA11bde05977b3631167028862bE2a173976CA11"
#[tokio::main]
async fn main() {
// embed values in binary for inspection without running & print on boot
// e.g., to inspect without running, use
// ```bash
// strings kinode | grep DOCKER_BUILD_IMAGE_VERSION
// ```
println!(
"\nDOCKER_BUILD_IMAGE_VERSION: {}\n",
env!("DOCKER_BUILD_IMAGE_VERSION")
"\nDOCKER_BUILD_IMAGE_VERSION: {}\nPACKAGES_ZIP_HASH: {}\n",
env!("DOCKER_BUILD_IMAGE_VERSION"),
env!("PACKAGES_ZIP_HASH"),
);
let app = build_command();
let matches = app.get_matches();
@@ -713,11 +718,11 @@ fn build_command() -> Command {
)
.arg(
arg!(--"max-peers" <MAX_PEERS> "Maximum number of peers to hold active connections with (default 32)")
.value_parser(value_parser!(u32)),
.value_parser(value_parser!(u64)),
)
.arg(
arg!(--"max-passthroughs" <MAX_PASSTHROUGHS> "Maximum number of passthroughs serve as a router (default 0)")
.value_parser(value_parser!(u32)),
.value_parser(value_parser!(u64)),
)
.arg(
arg!(--"soft-ulimit" <SOFT_ULIMIT> "Enforce a static maximum number of file descriptors (default fetched from system)")

kinode/src/net (peer send/connect)

@@ -6,17 +6,33 @@ use tokio::sync::mpsc;
/// if target is a peer, queue to be routed
/// otherwise, create peer and initiate routing
pub async fn send_to_peer(ext: &IdentityExt, data: &NetData, km: KernelMessage) {
pub async fn send_to_peer(ext: &IdentityExt, data: &NetData, mut km: KernelMessage) {
if let Some(mut peer) = data.peers.get_mut(&km.target.node) {
peer.sender.send(km).expect("net: peer sender was dropped");
match peer.send(km) {
Ok(()) => {
peer.set_last_message();
} else {
return;
}
Err(e_km) => {
// peer connection was closed, remove it and try to reconnect
data.peers.remove(&peer.identity.name).await;
km = e_km.0;
}
}
}
let Some(peer_id) = data.pki.get(&km.target.node) else {
return utils::error_offline(km, &ext.network_error_tx).await;
};
let (mut peer, peer_rx) = Peer::new(peer_id.clone(), false);
// send message to be routed
peer.send(km);
match peer.send(km) {
Ok(()) => {
peer.set_last_message();
}
Err(e_km) => {
return utils::error_offline(e_km.0, &ext.network_error_tx).await;
}
};
data.peers.insert(peer_id.name.clone(), peer).await;
tokio::spawn(connect_to_peer(
ext.clone(),
@@ -25,7 +41,6 @@ pub async fn send_to_peer(ext: &IdentityExt, data: &NetData, km: KernelMessage)
peer_rx,
));
}
}
/// based on peer's identity, either use one of their
/// protocols to connect directly, or loop through their
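The reworked send path above leans on a property of tokio's unbounded channels: a failed send returns the unsent value inside the error (the `e_km.0` above), so the KernelMessage can be retried against a freshly created peer instead of being lost. A standalone sketch of that recovery pattern, not kinode code:

```rust
use tokio::sync::mpsc;

#[tokio::main]
async fn main() {
    let (tx, rx) = mpsc::unbounded_channel::<String>();
    drop(rx); // simulate a peer whose connection task has exited

    match tx.send("some message".to_string()) {
        Ok(()) => println!("queued"),
        Err(e) => {
            // SendError is a tuple struct: e.0 is the unsent message,
            // handed back intact so it can be routed elsewhere.
            let (tx2, mut rx2) = mpsc::unbounded_channel();
            tx2.send(e.0).expect("fresh channel is open");
            println!("requeued: {}", rx2.recv().await.unwrap());
        }
    }
}
```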

kinode/src/net (networking loop, fd management)

@@ -69,7 +69,6 @@ pub async fn networking(
peers,
pending_passthroughs,
active_passthroughs,
max_peers,
max_passthroughs,
fds_limit: 10, // small hardcoded limit that gets replaced by fd_manager soon after boot
};
@@ -212,7 +211,7 @@ async fn handle_local_request(
printout.push_str(&format!(
"we have connections with {} peers ({} max):\r\n",
data.peers.peers().len(),
data.max_peers,
data.peers.max_peers(),
));
let now = std::time::SystemTime::now()
@@ -342,16 +341,17 @@ async fn handle_fdman(km: &KernelMessage, request_body: &[u8], data: &mut NetDat
match req {
lib::core::FdManagerRequest::FdsLimit(fds_limit) => {
data.fds_limit = fds_limit;
if data.max_peers > fds_limit {
data.max_peers = fds_limit;
}
data.peers.set_max_peers(fds_limit);
// TODO combine with max_peers check
// only update passthrough limit if it's higher than the new fds limit
// most nodes have passthroughs disabled, meaning this will keep it at 0
if data.max_passthroughs > fds_limit {
data.max_passthroughs = fds_limit;
}
// TODO cull passthroughs too
if data.peers.peers().len() >= data.fds_limit as usize {
let diff = data.peers.peers().len() - data.fds_limit as usize;
println!("net: culling {diff} peer(s)\r\n");
data.peers.cull(diff).await;
}
}

kinode/src/net (types: Peers, Peer, NetData)

@@ -6,6 +6,7 @@ use {
dashmap::DashMap,
ring::signature::Ed25519KeyPair,
serde::{Deserialize, Serialize},
std::sync::atomic::AtomicU64,
std::sync::Arc,
tokio::net::TcpStream,
tokio::sync::mpsc::{UnboundedReceiver, UnboundedSender},
@@ -57,7 +58,7 @@ pub struct RoutingRequest {
#[derive(Clone)]
pub struct Peers {
max_peers: u64,
max_peers: Arc<AtomicU64>,
send_to_loop: MessageSender,
peers: Arc<DashMap<String, Peer>>,
}
@@ -65,7 +66,7 @@ pub struct Peers {
impl Peers {
pub fn new(max_peers: u64, send_to_loop: MessageSender) -> Self {
Self {
max_peers,
max_peers: Arc::new(max_peers.into()),
send_to_loop,
peers: Arc::new(DashMap::new()),
}
@@ -75,6 +76,15 @@ impl Peers {
&self.peers
}
pub fn max_peers(&self) -> u64 {
self.max_peers.load(std::sync::atomic::Ordering::Relaxed)
}
pub fn set_max_peers(&self, max_peers: u64) {
self.max_peers
.store(max_peers, std::sync::atomic::Ordering::Relaxed);
}
pub fn get(&self, name: &str) -> Option<dashmap::mapref::one::Ref<'_, String, Peer>> {
self.peers.get(name)
}
@@ -94,7 +104,7 @@ impl Peers {
/// remove the one with the oldest last_message.
pub async fn insert(&self, name: String, peer: Peer) {
self.peers.insert(name, peer);
if self.peers.len() > self.max_peers as usize {
if self.peers.len() as u64 > self.max_peers.load(std::sync::atomic::Ordering::Relaxed) {
let oldest = self
.peers
.iter()
@@ -102,7 +112,7 @@ impl Peers {
.unwrap()
.key()
.clone();
self.peers.remove(&oldest);
self.remove(&oldest).await;
crate::fd_manager::send_fd_manager_hit_fds_limit(
&Address::new("our", NET_PROCESS_ID.clone()),
&self.send_to_loop,
@@ -122,7 +132,7 @@ impl Peers {
sorted_peers.sort_by_key(|p| p.last_message);
to_remove.extend(sorted_peers.iter().take(n));
for peer in to_remove {
self.peers.remove(&peer.identity.name);
self.remove(&peer.identity.name).await;
}
crate::fd_manager::send_fd_manager_hit_fds_limit(
&Address::new("our", NET_PROCESS_ID.clone()),
@@ -189,9 +199,13 @@ impl Peer {
}
/// Send a message to the peer.
pub fn send(&mut self, km: KernelMessage) {
self.sender.send(km).expect("net: peer sender was dropped");
pub fn send(
&mut self,
km: KernelMessage,
) -> Result<(), tokio::sync::mpsc::error::SendError<KernelMessage>> {
self.sender.send(km)?;
self.set_last_message();
Ok(())
}
/// Update the last message time to now.
@@ -222,7 +236,6 @@ pub struct NetData {
pub pending_passthroughs: PendingPassthroughs,
/// only used by routers
pub active_passthroughs: ActivePassthroughs,
pub max_peers: u64,
pub max_passthroughs: u64,
pub fds_limit: u64,
}
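The switch from a plain `max_peers: u64` to `Arc<AtomicU64>` matters because `Peers` is `Clone`: a plain field would be copied into each clone, so `set_max_peers` (called when fd_manager lowers the fds limit) would only update one copy. Sharing an atomic lets every clone observe the new limit. A standalone sketch of the pattern, not kinode code:

```rust
use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::Arc;

#[derive(Clone)]
struct Limits {
    // shared by every clone, unlike a plain u64 field
    max_peers: Arc<AtomicU64>,
}

fn main() {
    let original = Limits { max_peers: Arc::new(AtomicU64::new(32)) };
    let clone = original.clone();

    // lowering the cap through one handle (as handle_fdman does)...
    clone.max_peers.store(8, Ordering::Relaxed);

    // ...is visible through every other handle
    assert_eq!(original.max_peers.load(Ordering::Relaxed), 8);
}
```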

kinode/src/state.rs

@@ -14,7 +14,8 @@ use std::{
};
use tokio::{fs, io::AsyncWriteExt, sync::Mutex};
include!("../../target/bootstrapped_processes.rs");
static PACKAGES_ZIP: &[u8] = include_bytes!("../../target/packages.zip");
const FILE_TO_METADATA: &str = "file_to_metadata.json";
pub async fn load_state(
our_name: String,
@@ -381,7 +382,7 @@ async fn bootstrap(
current.capabilities.extend(runtime_caps.clone());
}
let packages = get_zipped_packages().await;
let packages = get_zipped_packages();
for (package_metadata, mut package) in packages.clone() {
let package_name = package_metadata.properties.package_name.as_str();
@@ -412,7 +413,7 @@ async fn bootstrap(
let mut zip_file =
fs::File::create(format!("{}/{}.zip", &pkg_path, &our_drive_name)).await?;
let package_zip_bytes = package.clone().into_inner().into_inner();
zip_file.write_all(package_zip_bytes).await?;
zip_file.write_all(&package_zip_bytes).await?;
// for each file in package.zip, write to vfs folder
for i in 0..package.len() {
@@ -713,20 +714,28 @@ fn sign_cap(cap: Capability, keypair: Arc<signature::Ed25519KeyPair>) -> Vec<u8>
}
/// read in `include!()`ed .zip package files
async fn get_zipped_packages() -> Vec<(
Erc721Metadata,
zip::ZipArchive<std::io::Cursor<&'static [u8]>>,
)> {
fn get_zipped_packages() -> Vec<(Erc721Metadata, zip::ZipArchive<std::io::Cursor<Vec<u8>>>)> {
let mut packages = Vec::new();
for (package_name, metadata_bytes, bytes) in BOOTSTRAPPED_PROCESSES.iter() {
if let Ok(zip) = zip::ZipArchive::new(std::io::Cursor::new(*bytes)) {
if let Ok(metadata) = serde_json::from_slice::<Erc721Metadata>(metadata_bytes) {
packages.push((metadata, zip));
} else {
println!("fs: metadata for package {package_name} is not valid Erc721Metadata!\r",);
}
}
let mut packages_zip = zip::ZipArchive::new(std::io::Cursor::new(PACKAGES_ZIP)).unwrap();
let mut file_to_metadata = vec![];
packages_zip
.by_name(FILE_TO_METADATA)
.unwrap()
.read_to_end(&mut file_to_metadata)
.unwrap();
let file_to_metadata: HashMap<String, Erc721Metadata> =
serde_json::from_slice(&file_to_metadata).unwrap();
for (file_name, metadata) in file_to_metadata {
let mut zip_bytes = vec![];
packages_zip
.by_name(&file_name)
.unwrap()
.read_to_end(&mut zip_bytes)
.unwrap();
let zip_archive = zip::ZipArchive::new(std::io::Cursor::new(zip_bytes)).unwrap();
packages.push((metadata, zip_archive));
}
packages
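For orientation, the `FILE_TO_METADATA` entry inside packages.zip maps each bundled inner zip's filename to its metadata, which the loop above uses to pair every archive with its parsed `Erc721Metadata`. A sketch of the shape; beyond `properties.package_name`, the metadata fields are illustrative rather than the exact schema:

```rust
// Illustrative only; the full Erc721Metadata schema is defined in lib,
// and this diff only shows the `properties.package_name` field.
use serde_json::json;

fn main() {
    let file_to_metadata = json!({
        "app_store.zip": {
            "properties": { "package_name": "app_store" } // abridged
        }
    });
    println!("{}", serde_json::to_string_pretty(&file_to_metadata).unwrap());
}
```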

lib/Cargo.toml

@@ -1,7 +1,7 @@
[package]
name = "lib"
authors = ["KinodeDAO"]
version = "0.9.5"
version = "0.9.7"
edition = "2021"
description = "A general-purpose sovereign cloud computing platform"
homepage = "https://kinode.org"

lib/src/core.rs

@@ -1104,13 +1104,15 @@ impl Identity {
match &self.routing {
NodeRouting::Routers(_) => None,
NodeRouting::Direct { ip, ports } | NodeRouting::Both { ip, ports, .. } => {
if let Some(port) = ports.get("ws")
&& *port != 0
{
if let Some(port) = ports.get("ws") {
if *port != 0 {
Some((ip, port))
} else {
None
}
} else {
None
}
}
}
}
@@ -1118,13 +1120,15 @@ impl Identity {
match &self.routing {
NodeRouting::Routers(_) => None,
NodeRouting::Direct { ip, ports } | NodeRouting::Both { ip, ports, .. } => {
if let Some(port) = ports.get("tcp")
&& *port != 0
{
if let Some(port) = ports.get("tcp") {
if *port != 0 {
Some((ip, port))
} else {
None
}
} else {
None
}
}
}
}

lib/src/lib.rs

@@ -1,4 +1,4 @@
#![feature(let_chains)]
//#![feature(let_chains)]
pub mod core;
pub mod eth;

scripts/build-release.py

@@ -27,14 +27,32 @@ def build_and_move(feature, tmp_dir, architecture, os_name):
zip_prefix = f"kinode-{architecture}-{os_name}"
release_env = os.environ.copy()
release_env["CARGO_PROFILE_RELEASE_LTO"] = f"fat"
release_env["CARGO_PROFILE_RELEASE_CODEGEN_UNITS"] = f"1"
release_env["CARGO_PROFILE_RELEASE_STRIP"] = f"symbols"
release_env["CARGO_PROFILE_RELEASE_LTO"] = "fat"
release_env["CARGO_PROFILE_RELEASE_CODEGEN_UNITS"] = "1"
release_env["CARGO_PROFILE_RELEASE_STRIP"] = "symbols"
if feature:
subprocess.run(["cargo", "+nightly", "build", "--release", "-p", "kinode", "--features", feature], check=True, env=release_env)
release_env["PATH_TO_PACKAGES_ZIP"] = f"../target/packages-{feature}.zip"
subprocess.run(
["cargo", "run", "-p", "build_packages", "--", "--features", feature],
check=True,
#stdout=subprocess.PIPE,
#stderr=subprocess.PIPE,
)
subprocess.run(
["cargo", "build", "--release", "-p", "kinode", "--features", feature],
check=True,
env=release_env,
#stdout=subprocess.PIPE,
#stderr=subprocess.PIPE,
)
zip_name = f"{zip_prefix}-{feature}.zip"
else:
subprocess.run(["cargo", "+nightly", "build", "--release", "-p", "kinode"], check=True, env=release_env)
subprocess.run(["cargo", "run", "-p", "build_packages"], check=True)
subprocess.run(
["cargo", "build", "--release", "-p", "kinode"],
check=True,
env=release_env,
)
zip_name = f"{zip_prefix}.zip"
# Move and rename the binary
@@ -74,4 +92,3 @@ def main():
if __name__ == "__main__":
main()

scripts/build_packages/Cargo.toml

@@ -0,0 +1,14 @@
[package]
name = "build_packages"
version = "0.1.0"
edition = "2021"
[dependencies]
anyhow = "1.0.71"
clap = "4"
fs-err = "2.11"
kit = { git = "https://github.com/kinode-dao/kit", rev = "9c94b4b" }
serde_json = "1"
tokio = "1.28"
walkdir = "2.4"
zip = "0.6"

scripts/build_packages/src/main.rs

@@ -0,0 +1,185 @@
use std::{
io::{Cursor, Read, Write},
path::{Path, PathBuf},
};
use clap::{Arg, Command};
use fs_err as fs;
use zip::write::FileOptions;
fn zip_directory(dir_path: &Path) -> anyhow::Result<Vec<u8>> {
let mut writer = Cursor::new(Vec::new());
let options = FileOptions::default()
.compression_method(zip::CompressionMethod::Deflated)
.unix_permissions(0o755)
.last_modified_time(zip::DateTime::from_date_and_time(2023, 6, 19, 0, 0, 0).unwrap());
{
let mut zip = zip::ZipWriter::new(&mut writer);
for sub_entry in walkdir::WalkDir::new(dir_path) {
let sub_entry = sub_entry?;
let path = sub_entry.path();
let name = path.strip_prefix(dir_path)?;
if path.is_file() {
zip.start_file(name.to_string_lossy(), options)?;
let mut file = fs::File::open(path)?;
let mut buffer = Vec::new();
file.read_to_end(&mut buffer)?;
zip.write_all(&buffer)?;
} else if !name.as_os_str().is_empty() {
zip.add_directory(name.to_string_lossy(), options)?;
}
}
zip.finish()?;
}
let zip_contents = writer.into_inner();
Ok(zip_contents)
}
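```rust
// Aside (sketch, not part of this file): the fixed last_modified_time
// above makes zip_directory deterministic -- zipping the same tree twice
// yields byte-identical archives, which keeps the PACKAGES_ZIP_HASH
// embedded by kinode/build.rs stable across rebuilds. Assuming the same
// zip 0.6 API:
fn deterministic_zip_demo(payload: &[u8]) -> Vec<u8> {
    use std::io::Write;
    let mut writer = std::io::Cursor::new(Vec::new());
    let options = zip::write::FileOptions::default()
        .last_modified_time(zip::DateTime::from_date_and_time(2023, 6, 19, 0, 0, 0).unwrap());
    let mut zip = zip::ZipWriter::new(&mut writer);
    zip.start_file("data.bin", options).unwrap();
    zip.write_all(payload).unwrap();
    zip.finish().unwrap();
    // two calls with the same payload return identical bytes
    writer.into_inner()
}
```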
fn build_and_zip_package(
entry_path: PathBuf,
parent_pkg_path: &str,
skip_frontend: bool,
features: &str,
) -> anyhow::Result<(PathBuf, String, Vec<u8>)> {
let rt = tokio::runtime::Runtime::new().unwrap();
rt.block_on(async {
kit::build::execute(
&entry_path,
skip_frontend,
false,
true,
features,
None,
None,
None,
vec![],
vec![],
false,
false,
false,
false,
)
.await
.map_err(|e| anyhow::anyhow!("{:?}", e))?;
let zip_contents = zip_directory(&Path::new(parent_pkg_path))?;
let zip_filename = format!("{}.zip", entry_path.file_name().unwrap().to_str().unwrap());
Ok((entry_path, zip_filename, zip_contents))
})
}
fn main() -> anyhow::Result<()> {
let matches = Command::new("build_packages")
.about("Build the core Kinode packages.")
.arg(
Arg::new("FEATURES")
.long("features")
.help("List of features to compile packages with")
.action(clap::ArgAction::Append),
)
.arg(
Arg::new("SKIP_FRONTEND")
.long("skip-build-frontend")
.help("Skip building the frontend")
.action(clap::ArgAction::SetTrue),
)
.arg(
Arg::new("OUTPUT_FILENAME")
.long("output-filename")
.help("Set output filename (default: packages-{features}.zip)")
.action(clap::ArgAction::Set),
)
.get_matches();
// kinode/target/debug/build_packages
let current_exe_dir = std::env::current_exe() // build_packages
.unwrap();
let top_level_dir = current_exe_dir
.parent() // debug/
.unwrap()
.parent() // target/
.unwrap()
.parent() // kinode/
.unwrap();
let kinode_dir = top_level_dir.join("kinode");
let packages_dir = kinode_dir.join("packages");
let mut features = matches
.get_many::<String>("FEATURES")
.unwrap_or_default()
.map(|s| s.to_owned())
.collect::<Vec<String>>();
features.sort();
let features = features.join(",");
let skip_frontend = matches.get_flag("SKIP_FRONTEND");
let results: Vec<anyhow::Result<(PathBuf, String, Vec<u8>)>> = fs::read_dir(&packages_dir)?
.filter_map(|entry| {
let entry_path = match entry {
Ok(e) => e.path(),
Err(_) => return None,
};
let child_pkg_path = entry_path.join("pkg");
if !child_pkg_path.exists() {
// don't run on, e.g., `.DS_Store`
return None;
}
Some(build_and_zip_package(
entry_path.clone(),
child_pkg_path.to_str().unwrap(),
skip_frontend,
&features,
))
})
.collect();
let mut file_to_metadata = std::collections::HashMap::new();
let target_dir = top_level_dir.join("target");
let target_packages_dir = target_dir.join("packages");
// fresh
if target_packages_dir.exists() {
fs::remove_dir_all(&target_packages_dir)?;
}
fs::create_dir_all(&target_packages_dir)?;
for result in results {
match result {
Ok((entry_path, zip_filename, zip_contents)) => {
let metadata_path = entry_path.join("metadata.json");
let metadata_contents = fs::read_to_string(&metadata_path)?;
let metadata_contents: serde_json::Value =
serde_json::from_str(&metadata_contents)?;
file_to_metadata.insert(zip_filename.clone(), metadata_contents);
let zip_path = target_packages_dir.join(&zip_filename);
fs::write(&zip_path, &zip_contents)?;
}
Err(e) => return Err(anyhow::anyhow!("{e:?}")),
}
}
let file_to_metadata = serde_json::to_value(&file_to_metadata)?;
let file_to_metadata = serde_json::to_string_pretty(&file_to_metadata)?;
let file_to_metadata_path = target_packages_dir.join("file_to_metadata.json");
fs::write(&file_to_metadata_path, file_to_metadata)?;
let package_zip_file_name = match matches.get_one::<String>("OUTPUT_FILENAME") {
Some(filename) => filename.to_string(),
None => {
if features.is_empty() {
"packages.zip".to_string()
} else {
format!("packages-{features}.zip")
}
}
};
let package_zip_path = target_dir.join(package_zip_file_name);
let package_zip_contents = zip_directory(&target_packages_dir)?;
fs::write(package_zip_path, package_zip_contents)?;
Ok(())
}