app_store: auto_update

This commit is contained in:
bitful-pannul 2024-08-20 17:16:48 +03:00
parent 339b348337
commit 39381ba2ce
10 changed files with 239 additions and 43 deletions

View File

@ -4,6 +4,7 @@ interface downloads {
//
use standard.{package-id};
use chain.{onchain-metadata};
variant download-requests {
// remote only
@ -13,6 +14,7 @@ interface downloads {
size(size-update),
// local only
local-download(local-download-request),
auto-update(auto-update-request),
download-complete(download-complete-request),
get-files(option<package-id>),
remove-file(remove-file-request),
@ -33,6 +35,11 @@ interface downloads {
desired-version-hash: string,
}
// sent by the chain process when a listing with auto_update enabled changes
// on-chain; metadata carries the fresh onchain-metadata used downstream to
// resolve the new current-version hash and kick off a local download.
record auto-update-request {
package-id: package-id,
metadata: onchain-metadata,
}
record remote-download-request {
package-id: package-id,
worker-address: string,

View File

@ -245,7 +245,6 @@ fn gen_package_info(id: &PackageId, state: &PackageState) -> serde_json::Value {
"our_version_hash": state.our_version_hash,
"verified": state.verified,
"caps_approved": state.caps_approved,
"manifest_hash": state.manifest_hash,
})
}

View File

@ -24,11 +24,12 @@ use crate::kinode::process::main::{
LocalResponse, NewPackageRequest, NewPackageResponse, UninstallResponse,
};
use kinode_process_lib::{
await_message, call_init, get_blob, http, println, vfs, Address, LazyLoadBlob, Message,
PackageId, Response,
await_message, call_init, get_blob, http, print_to_terminal, println, vfs, Address,
LazyLoadBlob, Message, PackageId, Response,
};
use serde::{Deserialize, Serialize};
use state::State;
use std::collections::HashMap;
wit_bindgen::generate!({
path: "target/wit",
@ -149,6 +150,7 @@ fn handle_message(
if !message.is_local(&our) {
return Err(anyhow::anyhow!("download complete from non-local node"));
}
http_server.ws_push_all_channels(
"/",
http::server::WsMessageType::Text,
@ -167,6 +169,43 @@ fn handle_message(
.to_vec(),
},
);
// auto_install case:
// the downloads process has given us the new package manifest's
// capability hashes, and the old package's capability hashes.
// we can use these to determine if the new package has the same
// capabilities as the old one, and if so, auto-install it.
if let Some(context) = message.context() {
let new_caps_hashes: HashMap<String, String> = serde_json::from_slice(context)?;
if let Some(package) =
state.packages.get(&req.package_id.clone().to_process_lib())
{
let all_match = new_caps_hashes.iter().all(|(key, new_hash)| {
package
.caps_hashes
.get(key)
.map_or(false, |current_hash| new_hash == current_hash)
});
if all_match {
print_to_terminal(1, "auto_install:main, all caps_hashes match");
if let Err(e) = utils::install(
&req.package_id,
None,
&req.version_hash,
state,
&our.node,
) {
print_to_terminal(
1,
&format!("error auto_installing package: {e}"),
);
}
} else {
print_to_terminal(1, "auto_install:main, caps_hashes do not match");
}
}
}
}
}
} else {

View File

@ -50,10 +50,10 @@ pub struct PackageState {
pub our_version_hash: String,
pub verified: bool,
pub caps_approved: bool,
/// the hash of the manifest file, which is used to determine whether package
/// the hash of the request_capabilities field of every process, which is used to determine whether package
/// capabilities have changed. if they have changed, auto-install must fail
/// and the user must approve the new capabilities.
pub manifest_hash: Option<String>,
pub caps_hashes: HashMap<String, String>,
}
/// this process's saved state
@ -62,7 +62,6 @@ pub struct State {
pub packages: HashMap<PackageId, PackageState>,
/// the APIs we have
pub installed_apis: HashSet<PackageId>,
// requested maybe too.
}
impl State {
@ -115,6 +114,7 @@ impl State {
timeout: 5,
};
let manifest_bytes = manifest_file.read()?;
let caps_hashes = utils::extract_caps_hashes(&manifest_bytes)?;
self.packages.insert(
package_id.clone(),
@ -122,7 +122,7 @@ impl State {
our_version_hash,
verified: true, // implicitly verified (TODO re-evaluate)
caps_approved: false, // must re-approve if you want to do something ??
manifest_hash: Some(utils::keccak_256_hash(&manifest_bytes)),
caps_hashes,
},
);

View File

@ -11,7 +11,10 @@ use {
get_blob, kernel_types as kt, println, vfs, Address, LazyLoadBlob, PackageId, ProcessId,
Request,
},
std::{collections::HashSet, str::FromStr},
std::{
collections::{HashMap, HashSet},
str::FromStr,
},
};
// quite annoyingly, we must convert from our gen'd version of PackageId
@ -125,12 +128,11 @@ pub fn new_package(
/// create a new package drive in VFS and add the package zip to it.
/// if an `api.zip` is present, unzip and stow in `/api`.
/// returns a string representing the manifest hash of the package
/// and a bool returning whether or not an api was found and unzipped.
/// returns a hashmap representing the requested capabilities hash for each process name.
pub fn create_package_drive(
package_id: &PackageId,
package_bytes: Vec<u8>,
) -> anyhow::Result<String> {
) -> anyhow::Result<HashMap<String, String>> {
let drive_name = format!("/{package_id}/pkg");
let blob = LazyLoadBlob {
mime: Some("application/zip".to_string()),
@ -173,7 +175,9 @@ pub fn create_package_drive(
timeout: VFS_TIMEOUT,
};
let manifest_bytes = manifest_file.read()?;
Ok(keccak_256_hash(&manifest_bytes))
let caps_hashes = extract_caps_hashes(&manifest_bytes)?;
Ok(caps_hashes)
}
pub fn extract_api(package_id: &PackageId) -> anyhow::Result<bool> {
@ -220,13 +224,13 @@ pub fn install(
Some(VFS_TIMEOUT),
)?;
let bytes = file.read()?;
let manifest_hash = create_package_drive(&process_package_id, bytes)?;
let caps_hashes = create_package_drive(&process_package_id, bytes)?;
let package_state = PackageState {
our_version_hash: version_hash.to_string(),
verified: true, // sideloaded apps are implicitly verified because there is no "source" to verify against
caps_approved: true, // TODO see if we want to auto-approve local installs
manifest_hash: Some(manifest_hash),
caps_hashes,
};
if let Ok(extracted) = extract_api(&process_package_id) {
@ -449,6 +453,17 @@ pub fn uninstall(state: &mut State, package_id: &PackageId) -> anyhow::Result<()
Ok(())
}
/// Parse a `manifest.json` blob and produce, for each process entry, the
/// keccak-256 hash of its `request_capabilities` field, keyed by process name.
/// These per-process hashes are compared across versions to decide whether an
/// auto-install is allowed (capabilities unchanged) or must be re-approved.
pub fn extract_caps_hashes(manifest_bytes: &[u8]) -> anyhow::Result<HashMap<String, String>> {
    let manifest: Vec<kt::PackageManifestEntry> = serde_json::from_slice(manifest_bytes)?;
    manifest
        .iter()
        .map(|entry| {
            // hash the serialized request_capabilities list, not the whole entry
            let caps_json = serde_json::to_vec(&entry.request_capabilities)?;
            Ok((entry.process_name.clone(), keccak_256_hash(&caps_json)))
        })
        .collect()
}
fn parse_capabilities(our_node: &str, caps: &Vec<serde_json::Value>) -> Vec<kt::Capability> {
let mut requested_capabilities: Vec<kt::Capability> = vec![];
for value in caps {

View File

@ -6,12 +6,13 @@
use crate::kinode::process::chain::{
ChainError, ChainRequests, OnchainApp, OnchainMetadata, OnchainProperties,
};
use crate::kinode::process::downloads::{AutoUpdateRequest, DownloadRequests};
use alloy_primitives::keccak256;
use alloy_sol_types::SolEvent;
use kinode::process::chain::ChainResponses;
use kinode_process_lib::{
await_message, call_init, eth, get_blob, get_state, http, kernel_types as kt, kimap,
print_to_terminal, println, Address, Message, PackageId, Response,
print_to_terminal, println, Address, Message, PackageId, Request, Response,
};
use std::{
collections::{HashMap, HashSet},
@ -284,25 +285,39 @@ fn handle_eth_log(our: &Address, state: &mut State, log: eth::Log) -> anyhow::Re
listing.metadata_uri = metadata_uri;
listing.tba = tba;
listing.metadata_hash = metadata_hash;
listing.metadata = Some(metadata);
listing.metadata = Some(metadata.clone());
}
std::collections::hash_map::Entry::Vacant(listing) => {
listing.insert(PackageListing {
tba,
metadata_uri,
metadata_hash,
metadata: Some(metadata),
metadata: Some(metadata.clone()),
auto_update: false,
});
}
}
if is_our_package {
state.published.insert(package_id);
state.published.insert(package_id.clone());
}
state.last_saved_block = block_number;
// if auto_update is enabled, send a message to downloads to kick off the update.
if let Some(listing) = state.listings.get(&package_id) {
if listing.auto_update {
print_to_terminal(1, &format!("kicking off auto-update for: {}", package_id));
let request = DownloadRequests::AutoUpdate(AutoUpdateRequest {
package_id: crate::kinode::process::main::PackageId::from_process_lib(package_id),
metadata: metadata.into(),
});
Request::to(("our", "downloads", "app_store", "sys"))
.body(serde_json::to_vec(&request)?)
.send()?;
}
}
Ok(())
}

View File

@ -3,18 +3,23 @@
//! manages downloading and sharing of versioned packages.
//!
use crate::kinode::process::downloads::{
DirEntry, DownloadCompleteRequest, DownloadError, DownloadRequests, DownloadResponses, Entry,
FileEntry, HashMismatch, LocalDownloadRequest, RemoteDownloadRequest, RemoveFileRequest,
AutoUpdateRequest, DirEntry, DownloadCompleteRequest, DownloadError, DownloadRequests,
DownloadResponses, Entry, FileEntry, HashMismatch, LocalDownloadRequest, RemoteDownloadRequest,
RemoveFileRequest,
};
use std::{
collections::{HashMap, HashSet},
io::Read,
str::FromStr,
};
use std::{collections::HashSet, io::Read, str::FromStr};
use ft_worker_lib::{spawn_receive_transfer, spawn_send_transfer};
use kinode_process_lib::{
await_message, call_init, get_blob, get_state,
http::client,
print_to_terminal, println, set_state,
kernel_types as kt, print_to_terminal, println, set_state,
vfs::{self, Directory, File},
Address, Message, PackageId, Request, Response,
Address, Message, PackageId, ProcessId, Request, Response,
};
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256};
@ -40,7 +45,9 @@ pub enum Resp {
#[derive(Debug, Serialize, Deserialize)]
pub struct State {
// persisted metadata about which packages we are mirroring
mirroring: HashSet<PackageId>,
// note, pending auto_updates are not persisted.
}
impl State {
@ -74,14 +81,22 @@ fn init(our: Address) {
open_or_create_dir("/app_store:sys/downloads").expect("could not open downloads");
let mut tmp = open_or_create_dir("/app_store:sys/downloads/tmp").expect("could not open tmp");
let mut auto_updates: HashSet<(PackageId, String)> = HashSet::new();
loop {
match await_message() {
Err(send_error) => {
print_to_terminal(1, &format!("got network error: {send_error}"));
}
Ok(message) => {
if let Err(e) = handle_message(&our, &mut state, &message, &mut downloads, &mut tmp)
{
if let Err(e) = handle_message(
&our,
&mut state,
&message,
&mut downloads,
&mut tmp,
&mut auto_updates,
) {
print_to_terminal(1, &format!("error handling message: {:?}", e));
}
}
@ -99,6 +114,7 @@ fn handle_message(
message: &Message,
downloads: &mut Directory,
tmp: &mut Directory,
auto_updates: &mut HashSet<(PackageId, String)>,
) -> anyhow::Result<()> {
if message.is_request() {
match serde_json::from_slice::<DownloadRequests>(message.body())? {
@ -153,7 +169,6 @@ fn handle_message(
},
))?)
.send()?;
// ok, now technically everything is ze ready. let's see what awaits and updates we send upstream/to the frontend.
}
DownloadRequests::RemoteDownload(download_request) => {
// this is a node requesting a download from us.
@ -183,17 +198,47 @@ fn handle_message(
.send();
}
DownloadRequests::DownloadComplete(req) => {
// forward to main:app_store:sys
if !message.is_local(our) {
return Err(anyhow::anyhow!("got non local download complete"));
}
// if we have a pending auto_install, forward that context to the main process.
// it will check if the caps_hashes match (no change in capabilities), and auto_install if it does.
let context = if auto_updates.remove(&(
req.package_id.clone().to_process_lib(),
req.version_hash.clone(),
)) {
match get_caps_hashes(
req.package_id.clone().to_process_lib(),
req.version_hash.clone(),
) {
Ok(caps_hashes) => Some(serde_json::to_vec(&caps_hashes)?),
Err(e) => {
print_to_terminal(
1,
&format!("auto_update: error getting caps hashes: {:?}", e),
);
None
}
}
} else {
None
};
// pushed to UI via websockets
let _ = Request::to(("our", "main", "app_store", "sys"))
.body(serde_json::to_vec(&req)?)
.send();
let mut request = Request::to(("our", "main", "app_store", "sys"))
.body(serde_json::to_vec(&req)?);
if let Some(ctx) = context {
request = request.context(ctx);
}
request.send()?;
}
DownloadRequests::GetFiles(maybe_id) => {
// if not local, throw to the boonies.
// note, can also implement a discovery protocol here in the future
if !message.is_local(our) {
return Err(anyhow::anyhow!("not local"));
return Err(anyhow::anyhow!("got non local get_files"));
}
let files = match maybe_id {
Some(id) => {
@ -293,16 +338,56 @@ fn handle_message(
))?)
.send()?;
}
DownloadRequests::AutoUpdate(auto_update_request) => {
if !message.is_local(&our)
&& message.source().process != ProcessId::new(Some("chain"), "app_store", "sys")
{
return Err(anyhow::anyhow!(
"got auto-update from non local chain source"
));
}
let AutoUpdateRequest {
package_id,
metadata,
} = auto_update_request.clone();
let process_lib_package_id = package_id.clone().to_process_lib();
// default auto_update to publisher. TODO: more config here.
let download_from = metadata.properties.publisher;
let current_version = metadata.properties.current_version;
let code_hashes = metadata.properties.code_hashes;
let version_hash = code_hashes
.iter()
.find(|(version, _)| version == &current_version)
.map(|(_, hash)| hash.clone())
.ok_or_else(|| anyhow::anyhow!("auto_update: error for package_id: {}, current_version: {}, no matching hash found", process_lib_package_id.to_string(), current_version))?;
let download_request = LocalDownloadRequest {
package_id,
download_from,
desired_version_hash: version_hash.clone(),
};
// kick off local download to ourselves.
Request::to(("our", "downloads", "app_store", "sys"))
.body(serde_json::to_vec(&DownloadRequests::LocalDownload(
download_request,
))?)
.send()?;
auto_updates.insert((process_lib_package_id, version_hash));
}
_ => {}
}
} else {
match serde_json::from_slice::<Resp>(message.body())? {
Resp::Download(download_response) => {
// TODO handle download response
// maybe push to http? need await for that...
// these are handled in line.
print_to_terminal(
1,
&format!("got download response: {:?}", download_response),
&format!("got a weird download response: {:?}", download_response),
);
}
Resp::HttpClient(resp) => {
@ -449,6 +534,26 @@ fn extract_and_write_manifest(file_contents: &[u8], manifest_path: &str) -> anyh
Ok(())
}
/// Read the stored manifest for `package_id` at `version_hash` from the
/// downloads drive and return the keccak-256 hash of each process's
/// `request_capabilities` field, keyed by process name.
///
/// Returns an error if the manifest file is missing, unreadable, or fails
/// to parse as a list of manifest entries.
fn get_caps_hashes(
    package_id: PackageId,
    version_hash: String,
) -> anyhow::Result<HashMap<String, String>> {
    // manifests live at /app_store:sys/downloads/<package_id>/<version_hash>.json
    let manifest_path = format!(
        "/app_store:sys/downloads/{}/{}.json",
        package_id, version_hash
    );
    let manifest_file = vfs::open_file(&manifest_path, false, None)?;
    let manifest_bytes = manifest_file.read()?;
    let manifest = serde_json::from_slice::<Vec<kt::PackageManifestEntry>>(&manifest_bytes)?;
    let mut caps_hashes = HashMap::new();
    for process in &manifest {
        // hash only the request_capabilities field; a change here means the
        // user must re-approve before auto-install may proceed
        let caps_bytes = serde_json::to_vec(&process.request_capabilities)?;
        caps_hashes.insert(process.process_name.clone(), keccak_256_hash(&caps_bytes));
    }
    Ok(caps_hashes)
}
/// helper function for vfs files, open if exists, if not create
fn open_or_create_file(path: &str) -> anyhow::Result<File> {
match vfs::open_file(path, false, None) {
@ -462,15 +567,23 @@ fn open_or_create_file(path: &str) -> anyhow::Result<File> {
/// helper function for vfs directories, open if exists, if not create
fn open_or_create_dir(path: &str) -> anyhow::Result<Directory> {
match vfs::open_dir(path, false, None) {
match vfs::open_dir(path, true, None) {
Ok(dir) => Ok(dir),
Err(_) => match vfs::open_dir(path, true, None) {
Err(_) => match vfs::open_dir(path, false, None) {
Ok(dir) => Ok(dir),
Err(_) => Err(anyhow::anyhow!("could not create file")),
Err(_) => Err(anyhow::anyhow!("could not create dir")),
},
}
}
/// Compute the Keccak-256 digest of `bytes` and render it as a lowercase
/// hex string with a `0x` prefix.
pub fn keccak_256_hash(bytes: &[u8]) -> String {
    use sha3::{Digest, Keccak256};
    // one-shot digest; equivalent to new() + update() + finalize()
    format!("0x{:x}", Keccak256::digest(bytes))
}
// quite annoyingly, we must convert from our gen'd version of PackageId
// to the process_lib's gen'd version. this is in order to access custom
// Impls that we want to use

View File

@ -163,8 +163,10 @@ fn handle_receiver(
print_to_terminal(
1,
&format!(
"ft_worker: hash mismatch: {} != {}",
version_hash, recieved_hash
"ft_worker: {} hash mismatch: desired: {} != actual: {}",
package_id.to_string(),
version_hash,
recieved_hash
),
);
let req = DownloadCompleteRequest {
@ -290,7 +292,7 @@ fn extract_and_write_manifest(file_contents: &[u8], manifest_path: &str) -> anyh
let manifest_file = open_or_create_file(&manifest_path)?;
manifest_file.write(contents.as_bytes())?;
println!("Extracted and wrote manifest.json");
print_to_terminal(1, "Extracted and wrote manifest.json");
break;
}
}

View File

@ -79,8 +79,15 @@ export default function MyDownloadsPage() {
setIsInstalling(true);
setError(null);
try {
const packageId = [...currentPath, selectedItem.File.name.replace('.zip', '')].join(':');
const versionHash = selectedItem.File.name.replace('.zip', '');
const fileName = selectedItem.File.name;
const parts = fileName.split(':');
const versionHash = parts.pop()?.replace('.zip', '');
if (!versionHash) throw new Error('Invalid file name format');
// Construct packageId by combining currentPath and remaining parts of the filename
const packageId = [...currentPath, ...parts].join(':');
await installApp(packageId, versionHash);
await fetchInstalled();
setShowCapApproval(false);

View File

@ -59,7 +59,6 @@ export interface PackageState {
our_version_hash: string;
verified: boolean;
caps_approved: boolean;
manifest_hash?: string;
}
export interface PackageManifest {