Merge branch 'v0.10.0' into bp/kviterators

This commit is contained in:
bitful-pannul 2024-12-19 23:21:57 +02:00
commit a59d540ba1
82 changed files with 2958 additions and 922 deletions

39
Cargo.lock generated
View File

@ -1,6 +1,6 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
version = 4
[[package]]
name = "addr2line"
@ -4191,7 +4191,7 @@ dependencies = [
[[package]]
name = "kinode_process_lib"
version = "0.10.0"
source = "git+https://github.com/kinode-dao/process_lib?rev=0443ece#0443ece2a5dfdbdc1b40db454a1535e1b1c1a1b3"
source = "git+https://github.com/kinode-dao/process_lib?rev=d97e012#d97e012842dd4cc0e036d5de5048064e770302ab"
dependencies = [
"alloy 0.8.1",
"alloy-primitives 0.8.15",
@ -4701,6 +4701,17 @@ dependencies = [
"libc",
]
[[package]]
name = "node_info"
version = "0.1.0"
dependencies = [
"kinode_process_lib 0.10.0",
"process_macros",
"serde",
"serde_json",
"wit-bindgen 0.36.0",
]
[[package]]
name = "nohash-hasher"
version = "0.2.0"
@ -5684,6 +5695,17 @@ dependencies = [
"windows-registry",
]
[[package]]
name = "reset"
version = "0.1.0"
dependencies = [
"kinode_process_lib 0.10.0",
"process_macros",
"serde",
"serde_json",
"wit-bindgen 0.36.0",
]
[[package]]
name = "rfc6979"
version = "0.4.0"
@ -6294,7 +6316,7 @@ dependencies = [
[[package]]
name = "snow"
version = "0.9.0"
source = "git+https://github.com/dr-frmr/snow?branch=dr/extract_cipherstates#1d4eb5f6747aa59aabb32bbbe698fb4bb7dfb9a4"
source = "git+https://github.com/dr-frmr/snow?branch=dr%2Fextract_cipherstates#1d4eb5f6747aa59aabb32bbbe698fb4bb7dfb9a4"
dependencies = [
"aes-gcm",
"blake2",
@ -6364,17 +6386,6 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
[[package]]
name = "state"
version = "0.1.0"
dependencies = [
"kinode_process_lib 0.10.0",
"process_macros",
"serde",
"serde_json",
"wit-bindgen 0.36.0",
]
[[package]]
name = "static_assertions"
version = "1.1.0"

View File

@ -19,8 +19,8 @@ members = [
"kinode/packages/chess/chess",
"kinode/packages/contacts/contacts",
"kinode/packages/homepage/homepage",
"kinode/packages/kns-indexer/kns-indexer", "kinode/packages/kns-indexer/get-block", "kinode/packages/kns-indexer/state",
"kinode/packages/settings/settings",
"kinode/packages/kns-indexer/kns-indexer", "kinode/packages/kns-indexer/get-block", "kinode/packages/settings/settings", "kinode/packages/kns-indexer/reset",
"kinode/packages/kns-indexer/node-info",
"kinode/packages/terminal/terminal",
"kinode/packages/terminal/alias", "kinode/packages/terminal/cat", "kinode/packages/terminal/echo",
"kinode/packages/terminal/help", "kinode/packages/terminal/hi", "kinode/packages/terminal/kfetch",

View File

@ -1890,7 +1890,7 @@ dependencies = [
[[package]]
name = "kinode_process_lib"
version = "0.10.0"
source = "git+https://github.com/kinode-dao/process_lib?rev=0443ece#0443ece2a5dfdbdc1b40db454a1535e1b1c1a1b3"
source = "git+https://github.com/kinode-dao/process_lib?rev=d97e012#d97e012842dd4cc0e036d5de5048064e770302ab"
dependencies = [
"alloy",
"alloy-primitives",

View File

@ -134,6 +134,10 @@ interface chain {
///
/// lazy-load-blob: none.
stop-auto-update(package-id),
/// Reset app-store db
///
/// lazy-load-blob: none.
reset,
}
/// Responses from the chain component
@ -149,6 +153,8 @@ interface chain {
/// lazy-load-blob: none.
auto-update-stopped,
/// lazy-load-blob: none.
/// successful reset
reset-ok,
err(chain-error),
}
@ -297,6 +303,9 @@ interface downloads {
blob-not-found,
vfs-error,
handling-error(string),
timeout,
invalid-manifest,
offline,
}
/// Notification that a download is complete
@ -306,12 +315,26 @@ interface downloads {
err: option<download-error>,
}
/// Request for an auto-download complete
record auto-download-complete-request {
download-info: download-complete-request,
/// Variant for an auto-download complete
variant auto-download-complete-request {
success(auto-download-success),
err(auto-download-error),
}
/// Auto-download success
record auto-download-success {
package-id: package-id,
version-hash: string,
manifest-hash: string,
}
/// Auto-download error
record auto-download-error {
package-id: package-id,
version-hash: string,
tries: list<tuple<string, download-error>>, // (mirror, error)
}
/// Represents a hash mismatch error
record hash-mismatch {
desired: string,

View File

@ -11,7 +11,7 @@ alloy-primitives = "0.8.15"
alloy-sol-types = "0.8.15"
anyhow = "1.0"
bincode = "1.3.3"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
process_macros = "0.1"
rand = "0.8"
serde = { version = "1.0", features = ["derive"] }

View File

@ -3,11 +3,13 @@
//! and sends back http_responses.
//!
use crate::{
kinode::process::chain::{ChainRequests, ChainResponses},
kinode::process::downloads::{
kinode::process::{
chain::{ChainRequests, ChainResponses},
downloads::{
DownloadRequests, DownloadResponses, Entry, LocalDownloadRequest, RemoveFileRequest,
},
state::{MirrorCheck, PackageState, State},
},
state::{MirrorCheck, PackageState, State, Updates},
};
use kinode_process_lib::{
http::{self, server, Method, StatusCode},
@ -28,6 +30,7 @@ pub fn init_frontend(our: &Address, http_server: &mut server::HttpServer) {
"/downloads", // all downloads
"/installed", // all installed apps
"/ourapps", // all apps we've published
"/updates", // all auto_updates
"/apps/:id", // detail about an on-chain app
"/downloads/:id", // local downloads for an app
"/installed/:id", // detail about an installed app
@ -37,7 +40,9 @@ pub fn init_frontend(our: &Address, http_server: &mut server::HttpServer) {
"/apps/:id/install", // install a downloaded app
"/downloads/:id/mirror", // start mirroring a version of a downloaded app
"/downloads/:id/remove", // remove a downloaded app
"/reset", // reset chain state, re-index
"/apps/:id/auto-update", // set auto-updating a version of a downloaded app
"/updates/:id/clear", // clear update info for an app.
"/mirrorcheck/:node", // check if a node/mirror is online/offline
] {
http_server
@ -207,9 +212,10 @@ fn make_widget() -> String {
pub fn handle_http_request(
our: &Address,
state: &mut State,
updates: &mut Updates,
req: &server::IncomingHttpRequest,
) -> (server::HttpResponse, Option<LazyLoadBlob>) {
match serve_paths(our, state, req) {
match serve_paths(our, state, updates, req) {
Ok((status_code, _headers, body)) => (
server::HttpResponse::new(status_code).header("Content-Type", "application/json"),
Some(LazyLoadBlob {
@ -248,13 +254,13 @@ fn gen_package_info(id: &PackageId, state: &PackageState) -> serde_json::Value {
"our_version_hash": state.our_version_hash,
"verified": state.verified,
"caps_approved": state.caps_approved,
"pending_update_hash": state.pending_update_hash,
})
}
fn serve_paths(
our: &Address,
state: &mut State,
updates: &mut Updates,
req: &server::IncomingHttpRequest,
) -> anyhow::Result<(http::StatusCode, Option<HashMap<String, String>>, Vec<u8>)> {
let method = req.method()?;
@ -533,7 +539,6 @@ fn serve_paths(
.ok_or(anyhow::anyhow!("missing blob"))?
.bytes;
let body_json: serde_json::Value = serde_json::from_slice(&body).unwrap_or_default();
let version_hash = body_json
.get("version_hash")
.and_then(|v| v.as_str())
@ -697,6 +702,53 @@ fn serve_paths(
)),
}
}
// GET all failed/pending auto_updates
"/updates" => {
let serialized = serde_json::to_vec(&updates).unwrap_or_default();
return Ok((StatusCode::OK, None, serialized));
}
// POST clear all failed/pending auto_updates for a package_id
"/updates/:id/clear" => {
let Ok(package_id) = get_package_id(url_params) else {
return Ok((
StatusCode::BAD_REQUEST,
None,
format!("Missing package_id").into_bytes(),
));
};
if method != Method::POST {
return Ok((
StatusCode::METHOD_NOT_ALLOWED,
None,
format!("Invalid method {method} for {bound_path}").into_bytes(),
));
}
let _ = updates.package_updates.remove(&package_id);
updates.save();
Ok((StatusCode::OK, None, vec![]))
}
// POST reset chain state, re-index
"/reset" => {
if method != Method::POST {
return Ok((
StatusCode::METHOD_NOT_ALLOWED,
None,
format!("Invalid method {method} for {bound_path}").into_bytes(),
));
}
let chain = Address::from_str("our@chain:app-store:sys")?;
let resp = Request::new()
.target(chain)
.body(&ChainRequests::Reset)
.send_and_await_response(5)??;
let msg = serde_json::from_slice::<ChainResponses>(resp.body())?;
if let ChainResponses::ResetOk = msg {
Ok((StatusCode::OK, None, vec![]))
} else {
Ok((StatusCode::INTERNAL_SERVER_ERROR, None, vec![]))
}
}
// GET online/offline mirrors for a listed app
"/mirrorcheck/:node" => {
if method != Method::GET {

View File

@ -42,7 +42,7 @@ use kinode_process_lib::{
LazyLoadBlob, Message, PackageId, Response,
};
use serde::{Deserialize, Serialize};
use state::State;
use state::{State, UpdateInfo, Updates};
wit_bindgen::generate!({
path: "target/wit",
@ -78,20 +78,22 @@ pub enum Resp {
call_init!(init);
fn init(our: Address) {
println!("started");
let mut http_server = http::server::HttpServer::new(5);
http_api::init_frontend(&our, &mut http_server);
// state = state built from the filesystem, installed packages
// updates = state saved with get/set_state(), auto_update metadata.
let mut state = State::load().expect("state loading failed");
let mut updates = Updates::load();
loop {
match await_message() {
Err(send_error) => {
print_to_terminal(1, &format!("main: got network error: {send_error}"));
}
Ok(message) => {
if let Err(e) = handle_message(&our, &mut state, &mut http_server, &message) {
if let Err(e) =
handle_message(&our, &mut state, &mut updates, &mut http_server, &message)
{
let error_message = format!("error handling message: {e:?}");
print_to_terminal(1, &error_message);
Response::new()
@ -111,6 +113,7 @@ fn init(our: Address) {
fn handle_message(
our: &Address,
state: &mut State,
updates: &mut Updates,
http_server: &mut http::server::HttpServer,
message: &Message,
) -> anyhow::Result<()> {
@ -134,7 +137,7 @@ fn handle_message(
}
http_server.handle_request(
server_request,
|incoming| http_api::handle_http_request(our, state, &incoming),
|incoming| http_api::handle_http_request(our, state, updates, &incoming),
|_channel_id, _message_type, _blob| {
// not expecting any websocket messages from FE currently
},
@ -168,15 +171,17 @@ fn handle_message(
"auto download complete from non-local node"
));
}
match req {
AutoDownloadCompleteRequest::Success(succ) => {
// auto_install case:
// the downloads process has given us the new package manifest's
// capability hashes, and the old package's capability hashes.
// we can use these to determine if the new package has the same
// capabilities as the old one, and if so, auto-install it.
let manifest_hash = req.manifest_hash;
let package_id = req.download_info.package_id;
let version_hash = req.download_info.version_hash;
let manifest_hash = succ.manifest_hash;
let package_id = succ.package_id;
let version_hash = succ.version_hash;
let process_lib_package_id = package_id.clone().to_process_lib();
@ -191,17 +196,55 @@ fn handle_message(
if let Err(e) =
utils::install(&package_id, None, &version_hash, state, &our.node)
{
if let Some(package) = state.packages.get_mut(&process_lib_package_id) {
package.pending_update_hash = Some(version_hash);
}
println!("error auto-installing package: {e}");
// Get or create the outer map for this package
updates
.package_updates
.entry(package_id.to_process_lib())
.or_default()
.insert(
version_hash.clone(),
UpdateInfo {
errors: vec![],
pending_manifest_hash: Some(manifest_hash.clone()),
},
);
updates.save();
} else {
println!("auto-installed update for package: {process_lib_package_id}");
println!(
"auto-installed update for package: {process_lib_package_id}"
);
}
} else {
if let Some(package) = state.packages.get_mut(&process_lib_package_id) {
package.pending_update_hash = Some(version_hash);
println!("error auto-installing package: manifest hash mismatch");
// TODO.
updates
.package_updates
.entry(package_id.to_process_lib())
.or_default()
.insert(
version_hash.clone(),
UpdateInfo {
errors: vec![],
pending_manifest_hash: Some(manifest_hash.clone()),
},
);
updates.save();
}
}
AutoDownloadCompleteRequest::Err(err) => {
println!("error auto-downloading package: {err:?}");
updates
.package_updates
.entry(err.package_id.to_process_lib())
.or_default()
.insert(
err.version_hash.clone(),
UpdateInfo {
errors: err.tries,
pending_manifest_hash: None,
},
);
updates.save();
}
}
}

View File

@ -1,5 +1,5 @@
use crate::{utils, VFS_TIMEOUT};
use kinode_process_lib::{kimap, vfs, PackageId};
use crate::{kinode::process::downloads::DownloadError, utils, VFS_TIMEOUT};
use kinode_process_lib::{get_state, kimap, set_state, vfs, PackageId};
use serde::{Deserialize, Serialize};
use std::collections::{HashMap, HashSet};
@ -54,9 +54,6 @@ pub struct PackageState {
/// capabilities have changed. if they have changed, auto-install must fail
/// and the user must approve the new capabilities.
pub manifest_hash: Option<String>,
/// stores the version hash of a failed auto-install attempt, which can be
/// later installed by the user by approving new caps.
pub pending_update_hash: Option<String>,
}
// this seems cleaner to me right now with pending_update_hash, but given how we serialize
@ -133,7 +130,6 @@ impl State {
verified: true, // implicitly verified (TODO re-evaluate)
caps_approved: false, // must re-approve if you want to do something ??
manifest_hash: Some(manifest_hash),
pending_update_hash: None, // ... this could be a separate state saved. don't want to reflect this info on-disk as a file.
},
);
@ -147,3 +143,76 @@ impl State {
Ok(())
}
}
/// Persistent record of pending/failed auto-updates, keyed first by package
/// id and then by version hash. Persisted with `set_state`/`get_state` (see
/// `Updates::load`/`Updates::save`), deliberately separate from the
/// filesystem-derived `State`.
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
#[serde(transparent)]
pub struct Updates {
    // serde_json cannot serialize maps with non-string keys, so the
    // PackageId-keyed map goes through the custom `package_id_map`
    // (de)serializer below.
    #[serde(with = "package_id_map")]
    pub package_updates: HashMap<PackageId, HashMap<String, UpdateInfo>>, // package id -> version_hash -> update info
}
/// Details recorded for a single pending or failed auto-update of one
/// package version.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct UpdateInfo {
    pub errors: Vec<(String, DownloadError)>, // errors collected by downloads process
    pub pending_manifest_hash: Option<String>, // pending manifest hash that differed from the installed one
}
impl Updates {
    /// Restore update metadata previously persisted with [`Updates::save`].
    /// Falls back to an empty record when no saved state exists or when the
    /// stored bytes fail to parse.
    pub fn load() -> Self {
        get_state()
            .and_then(|bytes| serde_json::from_slice(&bytes).ok())
            .unwrap_or_default()
    }

    /// Persist the current update metadata via the runtime's state store.
    pub fn save(&self) {
        set_state(&serde_json::to_vec(self).unwrap_or_default());
    }
}
// note: serde_json refuses to serialize maps whose keys are not strings, so
// PackageId keys are converted to/from their string form by hand.
mod package_id_map {
    use super::*;
    use std::{collections::HashMap, str::FromStr};

    /// Serialize the map with every `PackageId` key rendered as a string.
    pub fn serialize<S>(
        map: &HashMap<PackageId, HashMap<String, UpdateInfo>>,
        s: S,
    ) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        s.collect_map(map.iter().map(|(pid, versions)| (pid.to_string(), versions)))
    }

    /// Deserialize a string-keyed map back to `PackageId` keys. Entries whose
    /// key does not parse as a `PackageId` are silently dropped, matching the
    /// lenient load behavior of `Updates::load`.
    pub fn deserialize<'de, D>(
        d: D,
    ) -> Result<HashMap<PackageId, HashMap<String, UpdateInfo>>, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let raw = HashMap::<String, HashMap<String, UpdateInfo>>::deserialize(d)?;
        let mut parsed = HashMap::with_capacity(raw.len());
        for (key, versions) in raw {
            if let Ok(pid) = PackageId::from_str(&key) {
                parsed.insert(pid, versions);
            }
        }
        Ok(parsed)
    }
}

View File

@ -225,7 +225,6 @@ pub fn install(
verified: true, // sideloaded apps are implicitly verified because there is no "source" to verify against
caps_approved: true, // TODO see if we want to auto-approve local installs
manifest_hash: Some(manifest_hash),
pending_update_hash: None, // TODO: doublecheck if problematically overwrites auto_update state.
};
if let Ok(extracted) = extract_api(&process_package_id) {

View File

@ -11,7 +11,7 @@ alloy-primitives = "0.8.15"
alloy-sol-types = "0.8.15"
anyhow = "1.0"
bincode = "1.3.3"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
process_macros = "0.1"
rand = "0.8"
serde = { version = "1.0", features = ["derive"] }

View File

@ -33,14 +33,14 @@ use alloy_primitives::keccak256;
use alloy_sol_types::SolEvent;
use kinode::process::chain::ChainResponses;
use kinode_process_lib::{
await_message, call_init, eth, get_blob, get_state, http, kernel_types as kt, kimap,
print_to_terminal, println, timer, Address, Message, PackageId, Request, Response,
await_message, call_init, eth, get_blob, http, kernel_types as kt, kimap, print_to_terminal,
println,
sqlite::{self, Sqlite},
timer, Address, Message, PackageId, Request, Response,
};
use serde::{Deserialize, Serialize};
use std::{
collections::{HashMap, HashSet},
str::FromStr,
};
use std::collections::HashMap;
use std::str::FromStr;
wit_bindgen::generate!({
path: "target/wit",
@ -63,7 +63,6 @@ const KIMAP_ADDRESS: &str = "0x9CE8cCD2932DC727c70f9ae4f8C2b68E6Abed58C";
const DELAY_MS: u64 = 1_000; // 1s
#[derive(Debug, Serialize, Deserialize)]
pub struct State {
/// the kimap helper we are using
pub kimap: kimap::Kimap,
@ -71,10 +70,8 @@ pub struct State {
/// when we boot, we can read logs starting from this block and
/// rebuild latest state.
pub last_saved_block: u64,
/// onchain listings
pub listings: HashMap<PackageId, PackageListing>,
/// set of packages that we have published
pub published: HashSet<PackageId>,
/// tables: listings: <package_id, listing>, published: vec<package_id>
pub db: DB,
}
/// listing information derived from metadata hash in listing event
@ -83,10 +80,9 @@ pub struct PackageListing {
pub tba: eth::Address,
pub metadata_uri: String,
pub metadata_hash: String,
// should this even be optional?
// relegate to only valid apps maybe?
pub metadata: Option<kt::Erc721Metadata>,
pub auto_update: bool,
pub block: u64,
}
#[derive(Debug, Serialize, Deserialize, process_macros::SerdeJsonInto)]
@ -96,18 +92,287 @@ pub enum Req {
Request(ChainRequests),
}
/// Thin wrapper around the app-store's sqlite database, which persists the
/// chain-indexed state: package listings, the set of packages we publish,
/// and indexing metadata (last processed block).
pub struct DB {
    inner: Sqlite,
}

impl DB {
    /// Open (or create) `app_store_chain.sqlite` and ensure all tables exist.
    pub fn connect(our: &Address) -> anyhow::Result<Self> {
        let inner = sqlite::open(our.package_id(), "app_store_chain.sqlite", Some(10))?;
        // create tables (idempotent: CREATE TABLE IF NOT EXISTS)
        inner.write(CREATE_META_TABLE.into(), vec![], None)?;
        inner.write(CREATE_LISTINGS_TABLE.into(), vec![], None)?;
        inner.write(CREATE_PUBLISHED_TABLE.into(), vec![], None)?;
        Ok(Self { inner })
    }

    /// Delete the backing database file. Best-effort: a failure is logged,
    /// not propagated (the caller is expected to restart afterwards).
    pub fn reset(&self, our: &Address) {
        if let Err(e) = sqlite::remove_db(our.package_id(), "app_store_chain.sqlite", None) {
            println!("failed to reset app_store DB: {e}");
        }
    }

    /// Last chain block whose logs were fully processed, or 0 when unset or
    /// unparseable.
    pub fn get_last_saved_block(&self) -> anyhow::Result<u64> {
        let query = "SELECT value FROM meta WHERE key = 'last_saved_block'";
        let rows = self.inner.read(query.into(), vec![])?;
        if let Some(row) = rows.first() {
            if let Some(val_str) = row.get("value").and_then(|v| v.as_str()) {
                if let Ok(block) = val_str.parse::<u64>() {
                    return Ok(block);
                }
            }
        }
        Ok(0)
    }

    /// Upsert the last processed block number into the meta table.
    pub fn set_last_saved_block(&self, block: u64) -> anyhow::Result<()> {
        let query = "INSERT INTO meta (key, value) VALUES ('last_saved_block', ?)
             ON CONFLICT(key) DO UPDATE SET value=excluded.value";
        let params = vec![block.to_string().into()];
        self.inner.write(query.into(), params, None)?;
        Ok(())
    }

    /// Insert a listing, or update every mutable column if one already exists
    /// for this (package_name, publisher_node) pair.
    pub fn insert_or_update_listing(
        &self,
        package_id: &PackageId,
        listing: &PackageListing,
    ) -> anyhow::Result<()> {
        // metadata is stored as a JSON string; empty string == no metadata
        let metadata_json = if let Some(m) = &listing.metadata {
            serde_json::to_string(m)?
        } else {
            "".to_string()
        };
        let query = "INSERT INTO listings (package_name, publisher_node, tba, metadata_uri, metadata_hash, metadata_json, auto_update, block)
             VALUES (?, ?, ?, ?, ?, ?, ?, ?)
             ON CONFLICT(package_name, publisher_node)
             DO UPDATE SET
                tba=excluded.tba,
                metadata_uri=excluded.metadata_uri,
                metadata_hash=excluded.metadata_hash,
                metadata_json=excluded.metadata_json,
                auto_update=excluded.auto_update,
                block=excluded.block";
        let params = vec![
            package_id.package_name.clone().into(),
            package_id.publisher_node.clone().into(),
            listing.tba.to_string().into(),
            listing.metadata_uri.clone().into(),
            listing.metadata_hash.clone().into(),
            metadata_json.into(),
            (if listing.auto_update { 1 } else { 0 }).into(),
            listing.block.into(),
        ];
        self.inner.write(query.into(), params, None)?;
        Ok(())
    }

    /// Remove a listing row; no-op if it does not exist.
    pub fn delete_listing(&self, package_id: &PackageId) -> anyhow::Result<()> {
        let query = "DELETE FROM listings WHERE package_name = ? AND publisher_node = ?";
        let params = vec![
            package_id.package_name.clone().into(),
            package_id.publisher_node.clone().into(),
        ];
        self.inner.write(query.into(), params, None)?;
        Ok(())
    }

    /// Fetch a single listing by package id, if present.
    pub fn get_listing(&self, package_id: &PackageId) -> anyhow::Result<Option<PackageListing>> {
        let query = "SELECT tba, metadata_uri, metadata_hash, metadata_json, auto_update, block FROM listings WHERE package_name = ? AND publisher_node = ?";
        let params = vec![
            package_id.package_name.clone().into(),
            package_id.publisher_node.clone().into(),
        ];
        let rows = self.inner.read(query.into(), params)?;
        match rows.first() {
            Some(row) => Ok(Some(self.row_to_listing(row)?)),
            None => Ok(None),
        }
    }

    /// Fetch every listing in the database.
    pub fn get_all_listings(&self) -> anyhow::Result<Vec<(PackageId, PackageListing)>> {
        let query = "SELECT package_name, publisher_node, tba, metadata_uri, metadata_hash, metadata_json, auto_update, block FROM listings";
        let rows = self.inner.read(query.into(), vec![])?;
        rows.iter().map(|row| self.row_to_entry(row)).collect()
    }

    /// Fetch a page of listings ordered by (package_name, publisher_node).
    pub fn get_listings_batch(
        &self,
        limit: u64,
        offset: u64,
    ) -> anyhow::Result<Vec<(PackageId, PackageListing)>> {
        // bind LIMIT/OFFSET as parameters instead of splicing them into the
        // SQL text with format! — same result, idiomatic parameterized query.
        let query = "SELECT package_name, publisher_node, tba, metadata_uri, metadata_hash, metadata_json, auto_update, block
             FROM listings
             ORDER BY package_name, publisher_node
             LIMIT ? OFFSET ?";
        let params = vec![limit.into(), offset.into()];
        let rows = self.inner.read(query.into(), params)?;
        rows.iter().map(|row| self.row_to_entry(row)).collect()
    }

    /// Fetch all listings last touched after the given block number.
    pub fn get_listings_since_block(
        &self,
        block_number: u64,
    ) -> anyhow::Result<Vec<(PackageId, PackageListing)>> {
        let query = "SELECT package_name, publisher_node, tba, metadata_uri, metadata_hash, metadata_json, auto_update, block
             FROM listings
             WHERE block > ?";
        let params = vec![block_number.into()];
        let rows = self.inner.read(query.into(), params)?;
        rows.iter().map(|row| self.row_to_entry(row)).collect()
    }

    /// Convert one sqlite row into a `PackageListing`. The `tba` column must
    /// parse as an address; the remaining columns fall back to empty/zero
    /// defaults when missing or mistyped.
    pub fn row_to_listing(
        &self,
        row: &HashMap<String, serde_json::Value>,
    ) -> anyhow::Result<PackageListing> {
        let tba_str = row["tba"]
            .as_str()
            .ok_or_else(|| anyhow::anyhow!("Invalid tba"))?;
        let tba = tba_str.parse::<eth::Address>()?;
        let metadata_uri = row["metadata_uri"].as_str().unwrap_or("").to_string();
        let metadata_hash = row["metadata_hash"].as_str().unwrap_or("").to_string();
        let metadata_json = row["metadata_json"].as_str().unwrap_or("");
        // empty string means "no metadata stored" (see insert_or_update_listing)
        let metadata: Option<kt::Erc721Metadata> = if metadata_json.is_empty() {
            None
        } else {
            serde_json::from_str(metadata_json)?
        };
        let auto_update = row["auto_update"].as_i64().unwrap_or(0) == 1;
        let block = row["block"].as_i64().unwrap_or(0) as u64;
        Ok(PackageListing {
            tba,
            metadata_uri,
            metadata_hash,
            metadata,
            auto_update,
            block,
        })
    }

    /// Build the `(PackageId, PackageListing)` pair from a row that includes
    /// the package_name/publisher_node key columns (shared by the listing
    /// enumeration queries above).
    fn row_to_entry(
        &self,
        row: &HashMap<String, serde_json::Value>,
    ) -> anyhow::Result<(PackageId, PackageListing)> {
        let pid = PackageId {
            package_name: row["package_name"].as_str().unwrap_or("").to_string(),
            publisher_node: row["publisher_node"].as_str().unwrap_or("").to_string(),
        };
        Ok((pid, self.row_to_listing(row)?))
    }

    /// Whether this package is recorded as published by us.
    pub fn get_published(&self, package_id: &PackageId) -> anyhow::Result<bool> {
        let query = "SELECT 1 FROM published WHERE package_name = ? AND publisher_node = ?";
        let params = vec![
            package_id.package_name.clone().into(),
            package_id.publisher_node.clone().into(),
        ];
        let rows = self.inner.read(query.into(), params)?;
        Ok(!rows.is_empty())
    }

    /// Record a package as published by us (idempotent).
    pub fn insert_published(&self, package_id: &PackageId) -> anyhow::Result<()> {
        let query = "INSERT INTO published (package_name, publisher_node) VALUES (?, ?) ON CONFLICT DO NOTHING";
        let params = vec![
            package_id.package_name.clone().into(),
            package_id.publisher_node.clone().into(),
        ];
        self.inner.write(query.into(), params, None)?;
        Ok(())
    }

    /// Remove the published marker for a package; no-op if absent.
    pub fn delete_published(&self, package_id: &PackageId) -> anyhow::Result<()> {
        let query = "DELETE FROM published WHERE package_name = ? AND publisher_node = ?";
        let params = vec![
            package_id.package_name.clone().into(),
            package_id.publisher_node.clone().into(),
        ];
        self.inner.write(query.into(), params, None)?;
        Ok(())
    }

    /// Fetch the ids of every package we publish.
    pub fn get_all_published(&self) -> anyhow::Result<Vec<PackageId>> {
        let query = "SELECT package_name, publisher_node FROM published";
        let rows = self.inner.read(query.into(), vec![])?;
        Ok(rows
            .iter()
            .map(|row| PackageId {
                package_name: row["package_name"].as_str().unwrap_or("").to_string(),
                publisher_node: row["publisher_node"].as_str().unwrap_or("").to_string(),
            })
            .collect())
    }
}
// Schema for the meta table: generic key/value storage for indexer metadata
// (currently just 'last_saved_block').
const CREATE_META_TABLE: &str = "
CREATE TABLE IF NOT EXISTS meta (
key TEXT PRIMARY KEY,
value TEXT
);";

// Schema for on-chain package listings, keyed by (package_name, publisher_node).
// metadata_json holds the serialized Erc721Metadata ('' when absent); block is
// the last chain block at which this listing was touched.
const CREATE_LISTINGS_TABLE: &str = "
CREATE TABLE IF NOT EXISTS listings (
package_name TEXT NOT NULL,
publisher_node TEXT NOT NULL,
tba TEXT NOT NULL,
metadata_uri TEXT,
metadata_hash TEXT,
metadata_json TEXT,
auto_update INTEGER NOT NULL DEFAULT 0,
block INTEGER NOT NULL DEFAULT 0,
PRIMARY KEY (package_name, publisher_node)
);";

// Schema for the set of packages published by this node; presence of a row
// is the flag (no payload columns).
const CREATE_PUBLISHED_TABLE: &str = "
CREATE TABLE IF NOT EXISTS published (
package_name TEXT NOT NULL,
publisher_node TEXT NOT NULL,
PRIMARY KEY (package_name, publisher_node)
);";
call_init!(init);
fn init(our: Address) {
println!(
"chain started, indexing on contract address {}",
KIMAP_ADDRESS
);
// create new provider with request-timeout of 60s
// can change, log requests can take quite a long time.
let eth_provider: eth::Provider = eth::Provider::new(CHAIN_ID, CHAIN_TIMEOUT);
let mut state = fetch_state(eth_provider);
fetch_and_subscribe_logs(&our, &mut state);
let db = DB::connect(&our).expect("failed to open DB");
let kimap_helper =
kimap::Kimap::new(eth_provider, eth::Address::from_str(KIMAP_ADDRESS).unwrap());
let last_saved_block = db.get_last_saved_block().unwrap_or(0);
let mut state = State {
kimap: kimap_helper,
last_saved_block,
db,
};
fetch_and_subscribe_logs(&our, &mut state, last_saved_block);
loop {
match await_message() {
@ -126,17 +391,15 @@ fn init(our: Address) {
fn handle_message(our: &Address, state: &mut State, message: &Message) -> anyhow::Result<()> {
if !message.is_request() {
if message.is_local(&our) && message.source().process == "timer:distro:sys" {
// handling of ETH RPC subscriptions delayed by DELAY_MS
// to allow kns to have a chance to process block: handle now
let Some(context) = message.context() else {
return Err(anyhow::anyhow!("foo"));
return Err(anyhow::anyhow!("No context in timer message"));
};
let log = serde_json::from_slice(context)?;
handle_eth_log(our, state, log, false)?;
return Ok(());
}
} else {
match message.body().try_into()? {
match serde_json::from_slice::<Req>(message.body())? {
Req::Eth(eth_result) => {
if !message.is_local(our) || message.source().process != "eth:distro:sys" {
return Err(anyhow::anyhow!(
@ -154,7 +417,7 @@ fn handle_message(our: &Address, state: &mut State, message: &Message) -> anyhow
timer::set_timer(DELAY_MS, Some(serde_json::to_vec(log)?));
}
} else {
// attempt to resubscribe
// re-subscribe if error
state
.kimap
.provider
@ -162,7 +425,7 @@ fn handle_message(our: &Address, state: &mut State, message: &Message) -> anyhow
}
}
Req::Request(chains) => {
handle_local_request(state, chains)?;
handle_local_request(our, state, chains)?;
}
}
}
@ -170,51 +433,44 @@ fn handle_message(our: &Address, state: &mut State, message: &Message) -> anyhow
Ok(())
}
fn handle_local_request(state: &mut State, req: ChainRequests) -> anyhow::Result<()> {
fn handle_local_request(
our: &Address,
state: &mut State,
req: ChainRequests,
) -> anyhow::Result<()> {
match req {
ChainRequests::GetApp(package_id) => {
let onchain_app = state
.listings
.get(&package_id.clone().to_process_lib())
.map(|app| OnchainApp {
package_id: package_id,
tba: app.tba.to_string(),
metadata_uri: app.metadata_uri.clone(),
metadata_hash: app.metadata_hash.clone(),
metadata: app.metadata.as_ref().map(|m| m.clone().into()),
auto_update: app.auto_update,
});
let pid = package_id.clone().to_process_lib();
let listing = state.db.get_listing(&pid)?;
let onchain_app = listing.map(|app| app.to_onchain_app(&pid));
let response = ChainResponses::GetApp(onchain_app);
Response::new().body(&response).send()?;
}
ChainRequests::GetApps => {
let apps: Vec<OnchainApp> = state
.listings
.iter()
.map(|(id, listing)| listing.to_onchain_app(id))
let listings = state.db.get_all_listings()?;
let apps: Vec<OnchainApp> = listings
.into_iter()
.map(|(pid, listing)| listing.to_onchain_app(&pid))
.collect();
let response = ChainResponses::GetApps(apps);
Response::new().body(&response).send()?;
}
ChainRequests::GetOurApps => {
let apps: Vec<OnchainApp> = state
.published
.iter()
.filter_map(|id| {
state
.listings
.get(id)
.map(|listing| listing.to_onchain_app(id))
})
.collect();
let published_list = state.db.get_all_published()?;
let mut apps = Vec::new();
for pid in published_list {
if let Some(listing) = state.db.get_listing(&pid)? {
apps.push(listing.to_onchain_app(&pid));
}
}
let response = ChainResponses::GetOurApps(apps);
Response::new().body(&response).send()?;
}
ChainRequests::StartAutoUpdate(package_id) => {
if let Some(listing) = state.listings.get_mut(&package_id.to_process_lib()) {
let pid = package_id.to_process_lib();
if let Some(mut listing) = state.db.get_listing(&pid)? {
listing.auto_update = true;
state.db.insert_or_update_listing(&pid, &listing)?;
let response = ChainResponses::AutoUpdateStarted;
Response::new().body(&response).send()?;
} else {
@ -223,8 +479,10 @@ fn handle_local_request(state: &mut State, req: ChainRequests) -> anyhow::Result
}
}
ChainRequests::StopAutoUpdate(package_id) => {
if let Some(listing) = state.listings.get_mut(&package_id.to_process_lib()) {
let pid = package_id.to_process_lib();
if let Some(mut listing) = state.db.get_listing(&pid)? {
listing.auto_update = false;
state.db.insert_or_update_listing(&pid, &listing)?;
let response = ChainResponses::AutoUpdateStopped;
Response::new().body(&response).send()?;
} else {
@ -232,6 +490,11 @@ fn handle_local_request(state: &mut State, req: ChainRequests) -> anyhow::Result
Response::new().body(&error_response).send()?;
}
}
ChainRequests::Reset => {
state.db.reset(&our);
Response::new().body(&ChainResponses::ResetOk).send()?;
panic!("resetting state, restarting!");
}
}
Ok(())
}
@ -258,7 +521,7 @@ fn handle_eth_log(
if package.is_empty() || publisher.is_empty() {
Err(anyhow::anyhow!("invalid publisher name"))
} else {
Ok(PackageId::new(&package, &publisher))
Ok(PackageId::new(package, publisher))
}
})?;
@ -267,7 +530,7 @@ fn handle_eth_log(
// at the URI.
let metadata_uri = String::from_utf8_lossy(&note.data).to_string();
let is_our_package = &package_id.publisher() == &our.node();
let is_our_package = package_id.publisher() == our.node();
let (tba, metadata_hash) = if !startup {
// generate ~metadata-hash full-path
@ -292,10 +555,12 @@ fn handle_eth_log(
match data {
None => {
// if ~metadata-uri is also empty, this is an unpublish action!
// unpublish if metadata_uri empty
if metadata_uri.is_empty() {
state.published.remove(&package_id);
state.listings.remove(&package_id);
state.db.delete_published(&package_id)?;
state.db.delete_listing(&package_id)?;
state.last_saved_block = block_number;
state.db.set_last_saved_block(block_number)?;
return Ok(());
}
return Err(anyhow::anyhow!(
@ -309,7 +574,7 @@ fn handle_eth_log(
};
if is_our_package {
state.published.insert(package_id.clone());
state.db.insert_published(&package_id)?;
}
// if this is a startup event, we don't need to fetch metadata from the URI --
@ -322,98 +587,28 @@ fn handle_eth_log(
None
};
match state.listings.entry(package_id.clone()) {
std::collections::hash_map::Entry::Occupied(mut listing) => {
let listing = listing.get_mut();
listing.metadata_uri = metadata_uri;
listing.tba = tba;
listing.metadata_hash = metadata_hash;
listing.metadata = metadata.clone();
}
std::collections::hash_map::Entry::Vacant(listing) => {
listing.insert(PackageListing {
let mut listing = state
.db
.get_listing(&package_id)?
.unwrap_or(PackageListing {
tba,
metadata_uri,
metadata_hash,
metadata_uri: metadata_uri.clone(),
metadata_hash: metadata_hash.clone(),
metadata: metadata.clone(),
auto_update: false,
block: block_number,
});
}
}
if !startup {
// if auto_update is enabled, send a message to downloads to kick off the update.
if let Some(listing) = state.listings.get(&package_id) {
if listing.auto_update {
print_to_terminal(0, &format!("kicking off auto-update for: {}", package_id));
Request::to(("our", "downloads", "app-store", "sys"))
.body(&DownloadRequests::AutoUpdate(AutoUpdateRequest {
package_id: crate::kinode::process::main::PackageId::from_process_lib(
package_id,
),
metadata: metadata.unwrap().into(),
}))
.send()
.unwrap();
}
}
}
state.last_saved_block = block_number;
Ok(())
}
/// after startup, fetch metadata for all listings
/// we do this as a separate step to not repeatedly fetch outdated metadata
/// as we process logs.
fn update_all_metadata(state: &mut State) {
state.listings.retain(|package_id, listing| {
let (tba, metadata_hash) = {
// generate ~metadata-hash full-path
let hash_note = format!(
"~metadata-hash.{}.{}",
package_id.package(),
package_id.publisher()
);
// owner can change which we don't track (yet?) so don't save, need to get when desired
let Ok((tba, _owner, data)) = (match state.kimap.get(&hash_note) {
Ok(gr) => Ok(gr),
Err(e) => match e {
eth::EthError::RpcError(_) => {
// retry on RpcError after DELAY_MS sleep
// sleep here rather than with, e.g., a message to
// `timer:distro:sys` so that events are processed in
// order of receipt
std::thread::sleep(std::time::Duration::from_millis(DELAY_MS));
state.kimap.get(&hash_note)
}
_ => Err(e),
},
}) else {
return false;
};
match data {
None => {
// if ~metadata-uri is also empty, this is an unpublish action!
if listing.metadata_uri.is_empty() {
state.published.remove(package_id);
}
return false;
}
Some(hash_note) => (tba, String::from_utf8_lossy(&hash_note).to_string()),
}
};
// update fields
listing.tba = tba;
listing.metadata_uri = metadata_uri;
listing.metadata_hash = metadata_hash;
let metadata =
fetch_metadata_from_url(&listing.metadata_uri, &listing.metadata_hash, 30).ok();
listing.metadata = metadata.clone();
if listing.auto_update {
print_to_terminal(0, &format!("kicking off auto-update for: {}", package_id));
Request::to(("our", "downloads", "app-store", "sys"))
state.db.insert_or_update_listing(&package_id, &listing)?;
if !startup && listing.auto_update {
println!("kicking off auto-update for: {}", package_id);
Request::to(("our", "downloads", "app_store", "sys"))
.body(&DownloadRequests::AutoUpdate(AutoUpdateRequest {
package_id: crate::kinode::process::main::PackageId::from_process_lib(
package_id.clone(),
@ -423,8 +618,127 @@ fn update_all_metadata(state: &mut State) {
.send()
.unwrap();
}
true
});
if !startup {
state.last_saved_block = block_number;
state.db.set_last_saved_block(block_number)?;
}
Ok(())
}
/// after startup, fetch metadata for all listings
/// we do this as a separate step to not repeatedly fetch outdated metadata
/// as we process logs.
///
/// Fetches every listing updated since `last_saved_block` from the db, re-reads
/// each one's `~metadata-hash` note from kimap (retrying once on an RPC error),
/// refreshes the listing row, and kicks off an auto-update download for any
/// listing that has `auto_update` enabled. All failures are logged and skipped
/// so one bad listing cannot abort the whole pass.
fn update_all_metadata(state: &mut State, last_saved_block: u64) {
    // Only listings touched since the last processed block need refreshing.
    let updated_listings = match state.db.get_listings_since_block(last_saved_block) {
        Ok(listings) => listings,
        Err(e) => {
            print_to_terminal(
                1,
                &format!("error fetching updated listings since block {last_saved_block}: {e}"),
            );
            return;
        }
    };
    for (pid, mut listing) in updated_listings {
        // ~metadata-hash note path for this package, e.g. "~metadata-hash.pkg.publisher".
        let hash_note = format!("~metadata-hash.{}.{}", pid.package(), pid.publisher());
        let (tba, metadata_hash) = match state.kimap.get(&hash_note) {
            Ok((t, _o, data)) => {
                match data {
                    None => {
                        // An empty hash note means the package was unpublished on-chain:
                        // drop it from our published set (if our metadata_uri is also
                        // empty) and delete the listing entirely.
                        // If metadata_uri empty, unpublish
                        if listing.metadata_uri.is_empty() {
                            if let Err(e) = state.db.delete_published(&pid) {
                                print_to_terminal(1, &format!("error deleting published: {e}"));
                            }
                        }
                        if let Err(e) = state.db.delete_listing(&pid) {
                            print_to_terminal(1, &format!("error deleting listing: {e}"));
                        }
                        continue;
                    }
                    Some(hash_note) => (t, String::from_utf8_lossy(&hash_note).to_string()),
                }
            }
            Err(e) => {
                // If RpcError, retry once after delay
                // sleep inline (rather than via timer:distro:sys) so listings are
                // processed in order of receipt.
                if let eth::EthError::RpcError(_) = e {
                    std::thread::sleep(std::time::Duration::from_millis(DELAY_MS));
                    match state.kimap.get(&hash_note) {
                        Ok((t, _o, data)) => {
                            if let Some(hash_note) = data {
                                (t, String::from_utf8_lossy(&hash_note).to_string())
                            } else {
                                // no data again after retry
                                // same unpublish/delete handling as the non-retry path above.
                                if listing.metadata_uri.is_empty() {
                                    if let Err(e) = state.db.delete_published(&pid) {
                                        print_to_terminal(
                                            1,
                                            &format!("error deleting published: {e}"),
                                        );
                                    }
                                }
                                if let Err(e) = state.db.delete_listing(&pid) {
                                    print_to_terminal(1, &format!("error deleting listing: {e}"));
                                }
                                continue;
                            }
                        }
                        Err(e2) => {
                            print_to_terminal(
                                1,
                                &format!("error retrieving metadata-hash after retry: {e2:?}"),
                            );
                            continue;
                        }
                    }
                } else {
                    // non-RPC errors are not retried; skip this listing.
                    print_to_terminal(
                        1,
                        &format!("error retrieving metadata-hash: {e:?} for {pid}"),
                    );
                    continue;
                }
            }
        };
        // Update listing fields
        listing.tba = tba;
        listing.metadata_hash = metadata_hash;
        // Fetch fresh metadata from the URI; a fetch failure leaves metadata as None
        // but the listing row is still updated below.
        let metadata =
            match fetch_metadata_from_url(&listing.metadata_uri, &listing.metadata_hash, 30) {
                Ok(md) => Some(md),
                Err(err) => {
                    print_to_terminal(1, &format!("error fetching metadata for {}: {err}", pid));
                    None
                }
            };
        listing.metadata = metadata.clone();
        if let Err(e) = state.db.insert_or_update_listing(&pid, &listing) {
            print_to_terminal(1, &format!("error updating listing {}: {e}", pid));
        }
        // Auto-update kickoff requires metadata to have been fetched successfully.
        if listing.auto_update {
            if let Some(md) = metadata {
                print_to_terminal(0, &format!("kicking off auto-update for: {}", pid));
                if let Err(e) = Request::to(("our", "downloads", "app_store", "sys"))
                    .body(&DownloadRequests::AutoUpdate(AutoUpdateRequest {
                        package_id: crate::kinode::process::main::PackageId::from_process_lib(
                            pid.clone(),
                        ),
                        metadata: md.into(),
                    }))
                    .send()
                {
                    print_to_terminal(1, &format!("error sending auto-update request: {e}"));
                }
            }
        }
    }
}
/// create the filter used for app store getLogs and subscription.
@ -443,21 +757,25 @@ pub fn app_store_filter(state: &State) -> eth::Filter {
}
/// create a filter to fetch app store event logs from chain and subscribe to new events
pub fn fetch_and_subscribe_logs(our: &Address, state: &mut State) {
pub fn fetch_and_subscribe_logs(our: &Address, state: &mut State, last_saved_block: u64) {
let filter = app_store_filter(state);
// get past logs, subscribe to new ones.
// subscribe first so we don't miss any logs
println!("subscribing...");
state.kimap.provider.subscribe_loop(1, filter.clone());
for log in fetch_logs(
&state.kimap.provider,
&filter.from_block(state.last_saved_block),
) {
// println!("fetching old logs from block {last_saved_block}");
for log in fetch_logs(&state.kimap.provider, &filter.from_block(last_saved_block)) {
if let Err(e) = handle_eth_log(our, state, log, true) {
print_to_terminal(1, &format!("error ingesting log: {e}"));
};
}
update_all_metadata(state);
update_all_metadata(state, last_saved_block);
// save updated last_saved_block
if let Ok(block_number) = state.kimap.provider.get_block_number() {
state.last_saved_block = block_number;
state.db.set_last_saved_block(block_number).unwrap();
}
// println!("up to date to block {}", state.last_saved_block);
}
/// fetch logs from the chain with a given filter
@ -506,32 +824,6 @@ pub fn keccak_256_hash(bytes: &[u8]) -> String {
format!("0x{:x}", hasher.finalize())
}
/// fetch state from disk or create a new one if that fails
///
/// A persisted state is only reused when it deserializes cleanly AND its kimap
/// contract address matches the compiled-in `KIMAP_ADDRESS`; otherwise a fresh,
/// empty state (from block 0) is built so the chain is re-indexed.
pub fn fetch_state(provider: eth::Provider) -> State {
    // Try to restore persisted state; fall through to a fresh build on any failure.
    if let Some(bytes) = get_state() {
        match serde_json::from_slice::<State>(&bytes) {
            Ok(saved) if saved.kimap.address().to_string() == KIMAP_ADDRESS => return saved,
            Ok(saved) => println!(
                "state contract address mismatch. rebuilding state! expected {}, got {}",
                KIMAP_ADDRESS,
                saved.kimap.address().to_string()
            ),
            Err(e) => println!("failed to deserialize saved state, rebuilding: {e}"),
        }
    }
    // Fresh state: re-index everything from block 0.
    State {
        kimap: kimap::Kimap::new(provider, eth::Address::from_str(KIMAP_ADDRESS).unwrap()),
        last_saved_block: 0,
        listings: HashMap::new(),
        published: HashSet::new(),
    }
}
// quite annoyingly, we must convert from our gen'd version of PackageId
// to the process_lib's gen'd version. this is in order to access custom
// Impls that we want to use

View File

@ -8,7 +8,7 @@ simulation-mode = []
[dependencies]
anyhow = "1.0"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
process_macros = "0.1"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"

View File

@ -8,7 +8,7 @@ simulation-mode = []
[dependencies]
anyhow = "1.0"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
process_macros = "0.1"
rand = "0.8"
serde = { version = "1.0", features = ["derive"] }

View File

@ -42,13 +42,18 @@
//! mechanism is implemented in the FT worker for improved modularity and performance.
//!
use crate::kinode::process::downloads::{
AutoDownloadCompleteRequest, AutoUpdateRequest, DirEntry, DownloadCompleteRequest,
DownloadError, DownloadRequests, DownloadResponses, Entry, FileEntry, HashMismatch,
LocalDownloadRequest, RemoteDownloadRequest, RemoveFileRequest,
AutoDownloadCompleteRequest, AutoDownloadError, AutoUpdateRequest, DirEntry,
DownloadCompleteRequest, DownloadError, DownloadRequests, DownloadResponses, Entry, FileEntry,
HashMismatch, LocalDownloadRequest, RemoteDownloadRequest, RemoveFileRequest,
};
use std::{
collections::{HashMap, HashSet},
io::Read,
str::FromStr,
};
use std::{collections::HashSet, io::Read, str::FromStr};
use ft_worker_lib::{spawn_receive_transfer, spawn_send_transfer};
use kinode::process::downloads::AutoDownloadSuccess;
use kinode_process_lib::{
await_message, call_init, get_blob, get_state,
http::client,
@ -69,7 +74,6 @@ wit_bindgen::generate!({
mod ft_worker_lib;
pub const VFS_TIMEOUT: u64 = 5; // 5s
pub const APP_SHARE_TIMEOUT: u64 = 120; // 120s
#[derive(Debug, Serialize, Deserialize, process_macros::SerdeJsonInto)]
#[serde(untagged)] // untagged as a meta-type for all incoming responses
@ -78,6 +82,15 @@ pub enum Resp {
HttpClient(Result<client::HttpClientResponse, client::HttpClientError>),
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct AutoUpdateStatus {
mirrors_left: HashSet<String>, // set(node/url)
mirrors_failed: Vec<(String, DownloadError)>, // vec(node/url, error)
active_mirror: String, // (node/url)
}
type AutoUpdates = HashMap<(PackageId, String), AutoUpdateStatus>;
#[derive(Debug, Serialize, Deserialize)]
pub struct State {
// persisted metadata about which packages we are mirroring
@ -117,13 +130,11 @@ fn init(our: Address) {
let mut tmp =
vfs::open_dir("/app-store:sys/downloads/tmp", true, None).expect("could not open tmp");
let mut auto_updates: HashSet<(PackageId, String)> = HashSet::new();
// metadata for in-flight auto-updates
let mut auto_updates: AutoUpdates = HashMap::new();
loop {
match await_message() {
Err(send_error) => {
print_to_terminal(1, &format!("downloads: got network error: {send_error}"));
}
Ok(message) => {
if let Err(e) = handle_message(
&our,
@ -143,6 +154,33 @@ fn init(our: Address) {
.unwrap();
}
}
Err(send_error) => {
print_to_terminal(1, &format!("downloads: got network error: {send_error}"));
if let Some(context) = &send_error.context {
if let Ok(download_request) =
serde_json::from_slice::<LocalDownloadRequest>(&context)
{
let key = (
download_request.package_id.to_process_lib(),
download_request.desired_version_hash.clone(),
);
// Get the error first
let error = if send_error.kind.is_timeout() {
DownloadError::Timeout
} else if send_error.kind.is_offline() {
DownloadError::Offline
} else {
DownloadError::HandlingError(send_error.to_string())
};
// Then remove and get metadata
if let Some(metadata) = auto_updates.remove(&key) {
try_next_mirror(metadata, key, &mut auto_updates, error);
}
}
}
}
}
}
}
@ -157,7 +195,7 @@ fn handle_message(
message: &Message,
downloads: &mut Directory,
_tmp: &mut Directory,
auto_updates: &mut HashSet<(PackageId, String)>,
auto_updates: &mut AutoUpdates,
) -> anyhow::Result<()> {
if message.is_request() {
match message.body().try_into()? {
@ -174,8 +212,12 @@ fn handle_message(
} = download_request.clone();
if download_from.starts_with("http") {
// use http-client to GET it
Request::to(("our", "http-client", "distro", "sys"))
// use http_client to GET it
print_to_terminal(
1,
"kicking off http download for {package_id:?} and {version_hash:?}",
);
Request::to(("our", "http_client", "distro", "sys"))
.body(
serde_json::to_vec(&client::HttpClientAction::Http(
client::OutgoingHttpRequest {
@ -200,7 +242,6 @@ fn handle_message(
&package_id,
&desired_version_hash,
&download_from,
APP_SHARE_TIMEOUT,
)?;
Request::to((&download_from, "downloads", "app-store", "sys"))
@ -236,13 +277,8 @@ fn handle_message(
}
let target_worker = Address::from_str(&worker_address)?;
let _ = spawn_send_transfer(
our,
&package_id,
&desired_version_hash,
APP_SHARE_TIMEOUT,
&target_worker,
)?;
let _ =
spawn_send_transfer(our, &package_id, &desired_version_hash, &target_worker)?;
let resp = DownloadResponses::Success;
Response::new().body(&resp).send()?;
}
@ -257,50 +293,30 @@ fn handle_message(
if !message.is_local(our) {
return Err(anyhow::anyhow!("got non local download complete"));
}
// if we have a pending auto_install, forward that context to the main process.
// it will check if the caps_hashes match (no change in capabilities), and auto_install if it does.
let manifest_hash = if auto_updates.remove(&(
req.package_id.clone().to_process_lib(),
req.version_hash.clone(),
)) {
match get_manifest_hash(
req.package_id.clone().to_process_lib(),
req.version_hash.clone(),
) {
Ok(manifest_hash) => Some(manifest_hash),
Err(e) => {
print_to_terminal(
1,
&format!("auto_update: error getting manifest hash: {:?}", e),
);
None
}
}
} else {
None
};
// pushed to UI via websockets
Request::to(("our", "main", "app-store", "sys"))
// forward to main:app_store:sys, pushed to UI via websockets
Request::to(("our", "main", "app_store", "sys"))
.body(serde_json::to_vec(&req)?)
.send()?;
// trigger auto-update install trigger to main:app-store:sys
if let Some(manifest_hash) = manifest_hash {
let auto_download_complete_req = AutoDownloadCompleteRequest {
download_info: req.clone(),
manifest_hash,
};
print_to_terminal(
1,
&format!(
"auto_update download complete: triggering install on main:app-store:sys"
),
// Check if this is an auto-update download
let key = (
req.package_id.clone().to_process_lib(),
req.version_hash.clone(),
);
Request::to(("our", "main", "app-store", "sys"))
.body(serde_json::to_vec(&auto_download_complete_req)?)
.send()?;
if let Some(metadata) = auto_updates.remove(&key) {
if let Some(err) = req.err {
try_next_mirror(metadata, key, auto_updates, err);
} else if let Err(_e) = handle_auto_update_success(key.0.clone(), key.1.clone())
{
try_next_mirror(
metadata,
key,
auto_updates,
DownloadError::InvalidManifest,
);
}
}
}
DownloadRequests::GetFiles(maybe_id) => {
@ -414,29 +430,61 @@ fn handle_message(
} = auto_update_request.clone();
let process_lib_package_id = package_id.clone().to_process_lib();
// default auto_update to publisher. TODO: more config here.
let download_from = metadata.properties.publisher;
// default auto_update to publisher
// let download_from = metadata.properties.publisher.clone();
let current_version = metadata.properties.current_version;
let code_hashes = metadata.properties.code_hashes;
// Create a HashSet of mirrors including the publisher
let mut mirrors = HashSet::new();
let download_from = if let Some(first_mirror) = metadata.properties.mirrors.first()
{
first_mirror.clone()
} else {
"randomnode111.os".to_string()
};
println!("first_download_from: {download_from}");
mirrors.extend(metadata.properties.mirrors.into_iter());
mirrors.insert(metadata.properties.publisher.clone());
let version_hash = code_hashes
.iter()
.find(|(version, _)| version == &current_version)
.map(|(_, hash)| hash.clone())
// note: if no matching hash is found, treat it as a full failure
// and bubble the error up.
.ok_or_else(|| anyhow::anyhow!("auto_update: error for package_id: {}, current_version: {}, no matching hash found", process_lib_package_id.to_string(), current_version))?;
print_to_terminal(
1,
&format!(
"auto_update: kicking off download for {:?} from {} with version {} from mirror {}",
package_id, download_from, version_hash, download_from
),
);
let download_request = LocalDownloadRequest {
package_id,
download_from,
download_from: download_from.clone(),
desired_version_hash: version_hash.clone(),
};
// kick off local download to ourselves.
Request::to(("our", "downloads", "app-store", "sys"))
// Initialize auto-update status with mirrors
let key = (process_lib_package_id.clone(), version_hash.clone());
auto_updates.insert(
key,
AutoUpdateStatus {
mirrors_left: mirrors,
mirrors_failed: Vec::new(),
active_mirror: download_from.clone(),
},
);
// kick off local download to ourselves
Request::to(("our", "downloads", "app_store", "sys"))
.body(DownloadRequests::LocalDownload(download_request))
.send()?;
auto_updates.insert((process_lib_package_id, version_hash));
}
_ => {}
}
@ -445,19 +493,31 @@ fn handle_message(
Resp::Download(download_response) => {
// get context of the response.
// handled are errors or ok responses from a remote node.
// check state, do action based on that!
if let Some(context) = message.context() {
let download_request = serde_json::from_slice::<LocalDownloadRequest>(context)?;
match download_response {
DownloadResponses::Err(e) => {
print_to_terminal(1, &format!("downloads: got error response: {e:?}"));
let key = (
download_request.package_id.clone().to_process_lib(),
download_request.desired_version_hash.clone(),
);
if let Some(metadata) = auto_updates.remove(&key) {
try_next_mirror(metadata, key, auto_updates, e);
} else {
// If not an auto-update, forward error normally
Request::to(("our", "main", "app_store", "sys"))
.body(DownloadCompleteRequest {
package_id: download_request.package_id.clone(),
version_hash: download_request.desired_version_hash.clone(),
package_id: download_request.package_id,
version_hash: download_request.desired_version_hash,
err: Some(e),
})
.send()?;
}
}
DownloadResponses::Success => {
// todo: maybe we do something here.
print_to_terminal(
@ -477,29 +537,85 @@ fn handle_message(
return Err(anyhow::anyhow!("http-client response without context"));
};
let download_request = serde_json::from_slice::<LocalDownloadRequest>(context)?;
if let Ok(client::HttpClientResponse::Http(client::HttpResponse {
status, ..
})) = resp
{
if status == 200 {
if let Err(e) = handle_receive_http_download(&download_request) {
print_to_terminal(
1,
&format!("error handling http-client response: {:?}", e),
let key = (
download_request.package_id.clone().to_process_lib(),
download_request.desired_version_hash.clone(),
);
Request::to(("our", "main", "app-store", "sys"))
.body(DownloadRequests::DownloadComplete(
DownloadCompleteRequest {
package_id: download_request.package_id.clone(),
version_hash: download_request.desired_version_hash.clone(),
err: Some(e),
},
))
.send()?;
}
}
// Check if this is an auto-update request
let is_auto_update = auto_updates.contains_key(&key);
let metadata = if is_auto_update {
auto_updates.remove(&key)
} else {
println!("got http-client error: {resp:?}");
None
};
// Handle any non-200 response or client error
let Ok(client::HttpClientResponse::Http(resp)) = resp else {
if let Some(meta) = metadata {
let error = if let Err(e) = resp {
format!("HTTP client error: {e:?}")
} else {
"unexpected response type".to_string()
};
try_next_mirror(
meta,
key,
auto_updates,
DownloadError::HandlingError(error),
);
}
return Ok(());
};
if resp.status != 200 {
let error =
DownloadError::HandlingError(format!("HTTP status {}", resp.status));
handle_download_error(
is_auto_update,
metadata,
key,
auto_updates,
error,
&download_request,
)?;
return Ok(());
}
// Handle successful download
if let Err(e) = handle_receive_http_download(&download_request) {
print_to_terminal(1, &format!("error handling http_client response: {:?}", e));
handle_download_error(
is_auto_update,
metadata,
key,
auto_updates,
e,
&download_request,
)?;
} else if is_auto_update {
match handle_auto_update_success(key.0.clone(), key.1.clone()) {
Ok(_) => print_to_terminal(
1,
&format!(
"auto_update: successfully downloaded package {:?} version {}",
&download_request.package_id,
&download_request.desired_version_hash
),
),
Err(_) => {
if let Some(meta) = metadata {
try_next_mirror(
meta,
key,
auto_updates,
DownloadError::HandlingError(
"could not get manifest hash".to_string(),
),
);
}
}
}
}
}
}
@ -507,6 +623,70 @@ fn handle_message(
Ok(())
}
/// Try the next available mirror for a download, recording the current mirror's failure.
///
/// Removes the failed `active_mirror` from `mirrors_left` and appends it (with `error`)
/// to `mirrors_failed`. If another mirror remains, it becomes the active mirror and a
/// fresh `LocalDownload` request is queued to ourselves; otherwise an
/// `AutoDownloadCompleteRequest::Err` carrying every failed attempt is sent to
/// main:app_store:sys and the in-flight entry is cleared.
fn try_next_mirror(
    mut metadata: AutoUpdateStatus,
    key: (PackageId, String),
    auto_updates: &mut AutoUpdates,
    error: DownloadError,
) {
    print_to_terminal(
        1,
        &format!(
            "auto_update: got error from mirror {mirror:?} {error:?}, trying next mirror: {next_mirror:?}",
            next_mirror = metadata.mirrors_left.iter().next().cloned(),
            mirror = metadata.active_mirror,
            error = error
        ),
    );
    // Record failure and remove from available mirrors
    metadata
        .mirrors_failed
        .push((metadata.active_mirror.clone(), error));
    metadata.mirrors_left.remove(&metadata.active_mirror);

    let (package_id, version_hash) = key.clone();
    match metadata.mirrors_left.iter().next().cloned() {
        Some(next_mirror) => {
            // Mark the next mirror active and re-queue the download to ourselves.
            metadata.active_mirror = next_mirror.clone();
            auto_updates.insert(key, metadata);
            Request::to(("our", "downloads", "app_store", "sys"))
                .body(
                    serde_json::to_vec(&DownloadRequests::LocalDownload(LocalDownloadRequest {
                        package_id: crate::kinode::process::main::PackageId::from_process_lib(
                            package_id,
                        ),
                        download_from: next_mirror,
                        desired_version_hash: version_hash,
                    }))
                    .unwrap(),
                )
                .send()
                .unwrap();
        }
        None => {
            // BUGFIX: this message was previously passed as a plain string literal, so
            // `{package_id:?}` was printed verbatim instead of interpolated.
            print_to_terminal(
                1,
                &format!("auto_update: no more mirrors to try for package_id {package_id:?}"),
            );
            // gather the failed attempts and send the terminal error to main.
            let node_tries = metadata.mirrors_failed;
            let auto_download_error = AutoDownloadCompleteRequest::Err(AutoDownloadError {
                package_id: crate::kinode::process::main::PackageId::from_process_lib(package_id),
                version_hash,
                tries: node_tries,
            });
            Request::to(("our", "main", "app_store", "sys"))
                .body(auto_download_error)
                .send()
                .unwrap();
            // defensive: callers remove the key before invoking us, but clear it anyway.
            auto_updates.remove(&key);
        }
    }
}
fn handle_receive_http_download(
download_request: &LocalDownloadRequest,
) -> anyhow::Result<(), DownloadError> {
@ -558,6 +738,46 @@ fn handle_receive_http_download(
Ok(())
}
/// Route a download failure either into the auto-update mirror-retry loop or,
/// for a manually requested download, to main:app_store:sys so the UI can
/// surface it as a failed `DownloadCompleteRequest`.
fn handle_download_error(
    is_auto_update: bool,
    metadata: Option<AutoUpdateStatus>,
    key: (PackageId, String),
    auto_updates: &mut AutoUpdates,
    error: impl Into<DownloadError>,
    download_request: &LocalDownloadRequest,
) -> anyhow::Result<()> {
    let error = error.into();

    // Auto-update path: hand the failure to the mirror-retry logic and stop here.
    if is_auto_update {
        if let Some(meta) = metadata {
            try_next_mirror(meta, key, auto_updates, error);
        }
        return Ok(());
    }

    // Manual download path: report the failure upstream.
    Request::to(("our", "main", "app_store", "sys"))
        .body(DownloadCompleteRequest {
            package_id: download_request.package_id.clone(),
            version_hash: download_request.desired_version_hash.clone(),
            err: Some(error),
        })
        .send()?;
    Ok(())
}
/// Handle auto-update success case by getting manifest hash and sending completion message
///
/// Errors if the manifest hash cannot be computed or the completion message
/// fails to send.
fn handle_auto_update_success(package_id: PackageId, version_hash: String) -> anyhow::Result<()> {
    let manifest_hash = get_manifest_hash(package_id.clone(), version_hash.clone())?;
    Request::to(("our", "main", "app_store", "sys"))
        .body(AutoDownloadCompleteRequest::Success(AutoDownloadSuccess {
            package_id: crate::kinode::process::main::PackageId::from_process_lib(package_id),
            version_hash,
            manifest_hash,
        }))
        // BUGFIX: propagate send failures via `?` instead of panicking with
        // `.unwrap()` in a function that already returns anyhow::Result.
        .send()?;
    Ok(())
}
fn format_entries(entries: Vec<vfs::DirEntry>, state: &State) -> Vec<Entry> {
entries
.into_iter()

View File

@ -9,7 +9,7 @@ simulation-mode = []
[dependencies]
anyhow = "1.0"
bincode = "1.3.3"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
process_macros = "0.1"
rand = "0.8"
serde = { version = "1.0", features = ["derive"] }

View File

@ -17,7 +17,6 @@ pub fn spawn_send_transfer(
our: &Address,
package_id: &PackageId,
version_hash: &str,
timeout: u64,
to_addr: &Address,
) -> anyhow::Result<()> {
let transfer_id: u64 = rand::random();
@ -33,10 +32,7 @@ pub fn spawn_send_transfer(
return Err(anyhow::anyhow!("failed to spawn ft-worker!"));
};
let req = Request::new()
.target((&our.node, worker_process_id))
.expects_response(timeout + 1)
.body(
let req = Request::new().target((&our.node, worker_process_id)).body(
serde_json::to_vec(&DownloadRequests::RemoteDownload(RemoteDownloadRequest {
package_id: package_id.clone(),
desired_version_hash: version_hash.to_string(),
@ -58,7 +54,6 @@ pub fn spawn_receive_transfer(
package_id: &PackageId,
version_hash: &str,
from_node: &str,
timeout: u64,
) -> anyhow::Result<Address> {
let transfer_id: u64 = rand::random();
let timer_id = ProcessId::new(Some("timer"), "distro", "sys");
@ -75,7 +70,6 @@ pub fn spawn_receive_transfer(
let req = Request::new()
.target((&our.node, worker_process_id.clone()))
.expects_response(timeout + 1)
.body(
serde_json::to_vec(&DownloadRequests::LocalDownload(LocalDownloadRequest {
package_id: package_id.clone(),

View File

@ -29,6 +29,7 @@
//!
//! - Hash mismatches between the received file and the expected hash are detected and reported.
//! - Various I/O errors are caught and propagated.
//! - A 120 second killswitch is implemented to clean up dangling transfers.
//!
//! ## Integration with App Store:
//!
@ -61,6 +62,7 @@ wit_bindgen::generate!({
});
const CHUNK_SIZE: u64 = 262144; // 256KB
const KILL_SWITCH_MS: u64 = 120000; // 2 minutes
call_init!(init);
fn init(our: Address) {
@ -78,8 +80,7 @@ fn init(our: Address) {
}
// killswitch timer, 2 minutes. sender or receiver gets killed/cleaned up.
// TODO: killswitch update bubbles up to downloads process?
timer::set_timer(120000, None);
timer::set_timer(KILL_SWITCH_MS, None);
let start = std::time::Instant::now();
@ -105,7 +106,23 @@ fn init(our: Address) {
start.elapsed().as_millis()
),
),
Err(e) => print_to_terminal(1, &format!("ft_worker: receive error: {}", e)),
Err(e) => {
print_to_terminal(1, &format!("ft_worker: receive error: {}", e));
// bubble up to parent.
// TODO: doublecheck this.
// if this fires on a basic timeout, that's bad.
Request::new()
.body(DownloadRequests::DownloadComplete(
DownloadCompleteRequest {
package_id: package_id.clone().into(),
version_hash: desired_version_hash.to_string(),
err: Some(DownloadError::HandlingError(e.to_string())),
},
))
.target(parent_process)
.send()
.unwrap();
}
}
}
DownloadRequests::RemoteDownload(remote_request) => {
@ -187,6 +204,17 @@ fn handle_receiver(
loop {
let message = await_message()?;
if *message.source() == timer_address {
// send error message to downloads process
Request::new()
.body(DownloadRequests::DownloadComplete(
DownloadCompleteRequest {
package_id: package_id.clone().into(),
version_hash: version_hash.to_string(),
err: Some(DownloadError::Timeout),
},
))
.target(parent_process.clone())
.send()?;
return Ok(());
}
if !message.is_request() {

View File

@ -8,7 +8,7 @@ simulation-mode = []
[dependencies]
anyhow = "1.0"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
process_macros = "0.1"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"

View File

@ -39,6 +39,7 @@
"eth:distro:sys",
"http-server:distro:sys",
"http-client:distro:sys",
"sqlite:distro:sys",
{
"process": "vfs:distro:sys",
"params": {
@ -52,6 +53,7 @@
"vfs:distro:sys",
"http-client:distro:sys",
"eth:distro:sys",
"sqlite:distro:sys",
"timer:distro:sys"
],
"public": false

View File

@ -1,11 +1,16 @@
import React from 'react';
import { Link } from 'react-router-dom';
import { Link, useLocation } from 'react-router-dom';
import { STORE_PATH, PUBLISH_PATH, MY_APPS_PATH } from '../constants/path';
import { ConnectButton } from '@rainbow-me/rainbowkit';
import { FaHome } from "react-icons/fa";
import NotificationBay from './NotificationBay';
import useAppsStore from '../store';
const Header: React.FC = () => {
const location = useLocation();
const { updates } = useAppsStore();
const updateCount = Object.keys(updates || {}).length;
return (
<header className="app-header">
<div className="header-left">
@ -15,7 +20,10 @@ const Header: React.FC = () => {
</button>
<Link to={STORE_PATH} className={location.pathname === STORE_PATH ? 'active' : ''}>Apps</Link>
<Link to={PUBLISH_PATH} className={location.pathname === PUBLISH_PATH ? 'active' : ''}>Publish</Link>
<Link to={MY_APPS_PATH} className={location.pathname === MY_APPS_PATH ? 'active' : ''}>My Apps</Link>
<Link to={MY_APPS_PATH} className={location.pathname === MY_APPS_PATH ? 'active' : ''}>
My Apps
{updateCount > 0 && <span className="update-badge">{updateCount}</span>}
</Link>
</nav>
</div>
<div className="header-right">
@ -25,4 +33,5 @@ const Header: React.FC = () => {
</header>
);
};
export default Header;

View File

@ -0,0 +1,69 @@
import React, { useState } from 'react';
import { FaExclamationTriangle } from 'react-icons/fa';
import useAppsStore from '../store';
/**
 * Danger-zone button that wipes and re-indexes the app store state.
 *
 * Renders a "Reset Store" button; clicking it opens a confirmation modal
 * before calling `resetStore` from the global apps store.
 */
const ResetButton: React.FC = () => {
    // store action that triggers the backend reset
    const resetStore = useAppsStore(state => state.resetStore);
    // whether the confirmation modal is visible
    const [isOpen, setIsOpen] = useState(false);
    // disables the confirm button while the reset request is in flight
    const [isLoading, setIsLoading] = useState(false);

    // Run the reset; close the modal on success, alert on failure,
    // and always clear the loading flag.
    const handleReset = async () => {
        try {
            setIsLoading(true);
            await resetStore();
            setIsOpen(false);
        } catch (error) {
            console.error('Reset failed:', error);
            alert('Failed to reset the app store. Please try again.');
        } finally {
            setIsLoading(false);
        }
    };

    return (
        <>
            <button
                onClick={() => setIsOpen(true)}
                className="button danger"
                style={{ fontSize: '0.9rem' }}
            >
                Reset Store
            </button>

            {isOpen && (
                <div className="modal-overlay" onClick={() => setIsOpen(false)}>
                    <div className="modal-content" onClick={e => e.stopPropagation()}>
                        <button className="modal-close" onClick={() => setIsOpen(false)}>×</button>
                        <div style={{ display: 'flex', alignItems: 'center', gap: '0.75rem', marginBottom: '1rem' }}>
                            <FaExclamationTriangle size={24} style={{ color: 'var(--red)' }} />
                            <h3 style={{ margin: 0 }}>Warning</h3>
                        </div>
                        <p style={{ marginBottom: '1.5rem' }}>
                            This action will re-index all apps and reset the store state.
                            Only proceed if you know what you're doing.
                        </p>
                        <div style={{ display: 'flex', justifyContent: 'flex-end', gap: '0.75rem' }}>
                            <button
                                onClick={() => setIsOpen(false)}
                                className="button"
                            >
                                Cancel
                            </button>
                            <button
                                onClick={handleReset}
                                disabled={isLoading}
                                className="button danger"
                            >
                                {isLoading ? 'Resetting...' : 'Reset Store'}
                            </button>
                        </div>
                    </div>
                </div>
            )}
        </>
    );
};

export default ResetButton;

View File

@ -0,0 +1,16 @@
import React from 'react';
/** Props for {@link Tooltip}: `content` is the popup body; `children` is the hover target. */
interface TooltipProps {
    content: React.ReactNode;
    children?: React.ReactNode;
}

/**
 * Minimal CSS-driven tooltip: wraps `children` together with an icon and a
 * hidden content element; visibility is handled entirely by the
 * `tooltip-container` / `tooltip-content` styles.
 */
export function Tooltip({ content, children }: TooltipProps) {
    return (
        <div className="tooltip-container">
            {children}
            <span className="tooltip-icon"></span>
            <div className="tooltip-content">{content}</div>
        </div>
    );
}

View File

@ -3,3 +3,4 @@ export { default as MirrorSelector } from './MirrorSelector';
export { default as PackageSelector } from './PackageSelector';
export { default as ManifestDisplay } from './ManifestDisplay';
export { default as NotificationBay } from './NotificationBay';
export { default as ResetButton } from './ResetButton';

View File

@ -1,9 +1,37 @@
:root {
/* Core colors */
--orange: #ff7e33;
--dark-orange: #e56a24;
--orange-hover: #ff9900;
--red: #e53e3e;
--blue: #4299e1;
--green: #48bb78;
--gray: #718096;
/* Sophisticated neutrals */
--bg-light: #fdf6e3;
/* Solarized inspired beige */
--bg-dark: #1f1d24;
/* Deep slate with hint of purple */
--surface-light: #f5efd9;
/* Slightly deeper complementary beige */
--surface-dark: #2a2832;
/* Rich eggplant-tinged dark */
--text-light: #2d2a2e;
/* Warm charcoal */
--text-dark: #e8e6f0;
/* Cool moonlight white */
/* Border radius */
--border-radius: 8px;
}
/* Base styles */
body {
font-family: var(--font-family-main);
line-height: 1.6;
color: light-dark(var(--off-black), var(--off-white));
background-color: light-dark(var(--tan), var(--tasteful-dark));
color: light-dark(var(--text-light), var(--text-dark));
background-color: light-dark(var(--bg-light), var(--bg-dark));
}
/* Layout */
@ -35,7 +63,7 @@ a:hover {
/* Header */
.app-header {
background-color: light-dark(var(--off-white), var(--off-black));
background-color: light-dark(var(--surface-light), var(--surface-dark));
padding: 1rem;
margin-bottom: 1rem;
display: flex;
@ -71,12 +99,15 @@ a:hover {
text-decoration: none;
padding: 0.5rem;
border-radius: var(--border-radius);
position: relative;
display: inline-flex;
align-items: center;
}
.header-left nav a:hover,
.header-left nav a.active {
background-color: var(--orange);
color: var(--white);
color: var(--text-light);
}
/* Forms */
@ -91,6 +122,9 @@ form {
display: flex;
flex-direction: column;
margin-bottom: 1rem;
background: light-dark(var(--surface-light), var(--surface-dark));
padding: 0.75rem;
border-radius: var(--border-radius);
}
label {
@ -102,15 +136,21 @@ select {
padding: 0.5rem;
border: 1px solid var(--gray);
border-radius: var(--border-radius);
background-color: light-dark(var(--white), var(--tasteful-dark));
color: light-dark(var(--off-black), var(--off-white));
background-color: light-dark(var(--surface-light), var(--surface-dark));
color: light-dark(var(--text-light), var(--text-dark));
}
/* Buttons */
button {
display: inline-flex;
align-items: center;
justify-content: center;
gap: 0.5rem;
height: 40px;
font-weight: 500;
padding: 0.5rem 1rem;
background-color: var(--orange);
color: var(--white);
color: var(--text-light);
border: none;
border-radius: var(--border-radius);
cursor: pointer;
@ -125,6 +165,36 @@ button:disabled {
cursor: not-allowed;
}
button.danger {
background-color: var(--red);
}
button.danger:hover {
background-color: color-mix(in srgb, var(--red) 85%, black);
}
/* Download Button */
.download-btn {
background: var(--orange);
color: var(--text-light);
border: none;
}
.download-btn:hover {
background: var(--dark-orange);
}
/* Notification Button */
/* .notification-btn {
background: var(--surface-dark);
color: var(--text);
border: 1px solid var(--gray);
}
.notification-btn:hover {
background: var(--surface-hover);
} */
/* Tables */
table {
width: 100%;
@ -151,6 +221,9 @@ td {
/* Messages */
.message {
display: flex;
align-items: center;
font-weight: 500;
padding: 1rem;
border-radius: var(--border-radius);
margin-bottom: 1rem;
@ -158,17 +231,18 @@ td {
.message.error {
background-color: var(--red);
color: var(--white);
color: var(--text-light);
}
.message.success {
background-color: var(--green);
color: var(--white);
background: light-dark(var(--surface-light), var(--surface-dark));
color: light-dark(var(--text-light), var(--text-dark));
border: 1px solid var(--green);
}
.message.info {
background-color: var(--blue);
color: var(--white);
color: var(--text-light);
}
/* Publisher Info */
@ -242,17 +316,24 @@ td {
align-items: center;
gap: 0.5rem;
color: var(--red);
margin-top: 0.5rem;
font-size: 0.9rem;
margin-top: 0.25rem;
}
/* App Page and Download Page shared styles */
/* Shared page styles */
.store-page,
.app-page,
.downloads-page {
background-color: light-dark(var(--white), var(--maroon));
.my-apps-page,
.downloads-page,
.publish-page {
padding: 1rem;
background: light-dark(var(--bg-light), var(--bg-dark));
margin: 0 1vw;
border-radius: var(--border-radius);
padding: 2rem;
width: 100%;
}
.app-info {
max-width: 20rem;
}
.app-header {
@ -268,12 +349,26 @@ td {
}
.app-info {
background-color: light-dark(var(--tan), var(--tasteful-dark));
background: light-dark(var(--surface-light), var(--surface-dark));
border-radius: var(--border-radius);
padding: 1.5rem;
margin-bottom: 2rem;
}
/* Components with secondary backgrounds */
.app-header,
.app-info,
.app-description,
.form-group,
.search-bar input,
.version-selector,
.mirror-selector select,
.secondary,
.message.success {
background: light-dark(var(--surface-light), var(--surface-dark)) !important;
color: light-dark(var(--text-light), var(--text-dark));
}
/* Download Page specific styles */
.download-section {
display: flex;
@ -289,8 +384,8 @@ td {
padding: 0.5em;
border: 1px solid var(--gray);
border-radius: var(--border-radius);
background-color: light-dark(var(--white), var(--tasteful-dark));
color: light-dark(var(--off-black), var(--off-white));
background-color: light-dark(var(--surface-light), var(--surface-dark));
color: light-dark(var(--text-light), var(--text-dark));
}
/* Action Buttons */
@ -311,23 +406,23 @@ td {
.primary {
background-color: var(--orange);
color: var(--white);
color: var(--text-light);
}
.primary:hover:not(:disabled) {
background-color: var(--dark-orange);
color: var(--white);
color: var(--text-light);
}
.secondary {
background-color: light-dark(var(--off-white), var(--off-black));
background-color: light-dark(var(--surface-light), var(--surface-dark));
color: var(--orange);
border: 2px solid var(--orange);
}
.secondary:hover:not(:disabled) {
background-color: var(--orange);
color: var(--white);
color: var(--text-light);
}
.action-button:disabled,
@ -337,6 +432,21 @@ td {
cursor: not-allowed;
}
.action-button.download-button {
background: var(--orange);
color: var(--text-light);
border: none;
}
.action-button.download-button:hover:not(:disabled) {
background: var(--dark-orange);
}
.action-button.download-button:disabled {
opacity: 0.5;
cursor: not-allowed;
}
/* App actions */
.app-actions {
display: flex;
@ -385,8 +495,8 @@ td {
}
.cap-approval-content {
background-color: light-dark(var(--white), var(--tasteful-dark));
color: light-dark(var(--off-black), var(--off-white));
background-color: light-dark(var(--surface-light), var(--surface-dark));
color: light-dark(var(--text-light), var(--text-dark));
padding: 2rem;
border-radius: 8px;
max-width: 80%;
@ -395,8 +505,8 @@ td {
}
.json-display {
background-color: light-dark(var(--tan), var(--off-black));
color: light-dark(var(--off-black), var(--off-white));
background-color: light-dark(var(--surface-light), var(--surface-dark));
color: light-dark(var(--text-light), var(--text-dark));
padding: 1rem;
border-radius: 4px;
white-space: pre-wrap;
@ -410,6 +520,44 @@ td {
margin-top: 1rem;
}
/* Search bar */
.search-bar {
width: 100%;
margin: 1rem auto 2rem;
position: relative;
}
.search-bar input {
width: 100%;
padding: 0.75rem 1rem 0.75rem 2.5rem;
border: 2px solid transparent;
border-radius: 2rem;
background: light-dark(var(--surface-light), var(--surface-dark));
color: light-dark(var(--text-light), var(--text-dark));
font-size: 1rem;
transition: all 0.2s ease;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
}
.search-bar input:focus {
outline: none;
border-color: var(--orange);
box-shadow: 0 4px 8px rgba(0, 0, 0, 0.1);
}
.search-bar svg {
position: absolute;
left: 0.75rem;
top: 50%;
transform: translateY(-50%);
color: var(--gray);
pointer-events: none;
}
.search-bar input::placeholder {
color: var(--gray);
}
/* Responsive adjustments */
@media (max-width: 48em) {
@ -442,7 +590,7 @@ td {
}
.manifest-display {
background: light-dark(var(--white), var(--tasteful-dark));
background: light-dark(var(--surface-light), var(--surface-dark));
border-radius: var(--border-radius);
padding: 1rem;
max-width: 600px;
@ -450,7 +598,7 @@ td {
.process-manifest {
margin-bottom: 0.5rem;
border: 1px solid light-dark(var(--gray), var(--off-black));
border: 1px solid light-dark(var(--gray), var(--surface-dark));
border-radius: var(--border-radius);
overflow: hidden;
}
@ -464,12 +612,12 @@ td {
background: none;
border: none;
cursor: pointer;
color: light-dark(var(--off-black), var(--off-white));
color: light-dark(var(--text-light), var(--text-dark));
transition: background-color 0.2s;
}
.process-header:hover {
background: light-dark(var(--tan), var(--off-black));
background-color: light-dark(var(--surface-light), var(--surface-dark));
}
.process-name {
@ -481,7 +629,7 @@ td {
.process-indicators {
display: flex;
gap: 0.5rem;
color: light-dark(var(--gray), var(--off-white));
color: light-dark(var(--gray), var(--text-dark));
}
.network-icon {
@ -498,8 +646,8 @@ td {
.process-details {
padding: 1rem;
background: light-dark(var(--tan), var(--off-black));
border-top: 1px solid light-dark(var(--gray), var(--off-black));
background: light-dark(var(--surface-light), var(--surface-dark));
border-top: 1px solid light-dark(var(--gray), var(--surface-dark));
}
.capability-section {
@ -512,13 +660,13 @@ td {
.capability-section h4 {
margin: 0 0 0.5rem 0;
color: light-dark(var(--off-black), var(--off-white));
color: light-dark(var(--text-light), var(--text-dark));
}
.capability-section ul {
margin: 0;
padding-left: 1.5rem;
color: light-dark(var(--gray), var(--off-white));
color: light-dark(var(--gray), var(--text-dark));
}
.capability-section li {
@ -538,7 +686,7 @@ td {
align-items: center;
gap: 0.5rem;
padding: 0.5rem;
color: light-dark(var(--off-black), var(--off-white));
color: light-dark(var(--text-light), var(--text-dark));
}
.notification-details {
@ -548,7 +696,7 @@ td {
width: 320px;
max-height: 400px;
overflow-y: auto;
background-color: light-dark(var(--white), var(--tasteful-dark));
background-color: light-dark(var(--surface-light), var(--surface-dark));
border-radius: var(--border-radius);
box-shadow: 0 2px 10px rgba(0, 0, 0, 0.1);
z-index: 1000;
@ -557,7 +705,7 @@ td {
.badge {
background-color: var(--orange);
color: var(--white);
color: var(--text-light);
border-radius: 50%;
padding: 0.25rem 0.5rem;
font-size: 0.75rem;
@ -571,8 +719,8 @@ td {
padding: 1rem;
margin: 0.5rem 0;
border-radius: var(--border-radius);
background-color: light-dark(var(--tan), var(--off-black));
color: light-dark(var(--off-black), var(--off-white));
background-color: light-dark(var(--surface-light), var(--surface-dark));
color: light-dark(var(--text-light), var(--text-dark));
}
.notification-item.error {
@ -606,7 +754,7 @@ td {
background: none;
border: none;
cursor: pointer;
color: light-dark(var(--gray), var(--off-white));
color: light-dark(var(--gray), var(--text-dark));
padding: 0.25rem;
}
@ -617,7 +765,7 @@ td {
.progress-bar {
margin-top: 0.5rem;
height: 4px;
background-color: light-dark(var(--white), var(--off-black));
background-color: light-dark(var(--surface-light), var(--surface-dark));
border-radius: 2px;
overflow: hidden;
}
@ -643,8 +791,8 @@ td {
}
.modal-content {
background-color: light-dark(var(--white), var(--tasteful-dark));
color: light-dark(var(--off-black), var(--off-white));
background-color: light-dark(var(--surface-light), var(--surface-dark));
color: light-dark(var(--text-light), var(--text-dark));
padding: 1.5rem;
border-radius: var(--border-radius);
position: relative;
@ -660,7 +808,7 @@ td {
background: none;
border: none;
cursor: pointer;
color: light-dark(var(--gray), var(--off-white));
color: light-dark(var(--gray), var(--text-dark));
padding: 0.25rem;
}
@ -693,7 +841,7 @@ td {
50%,
70% {
transform: translate3d(-4px, 0, 0);
}
}
40%,
60% {
@ -708,9 +856,511 @@ td {
50% {
opacity: 0.6;
}
}
100% {
opacity: 1;
}
}
/* Loading Spinner */
.loading-spinner {
display: inline-block;
width: 20px;
height: 20px;
margin-right: 8px;
border: 2px solid var(--text-light);
border-radius: 50%;
border-top-color: transparent;
animation: spin 1s linear infinite;
}
.loading-spinner.small {
width: 14px;
height: 14px;
margin-right: 6px;
border-width: 1.5px;
}
@keyframes spin {
to {
transform: rotate(360deg);
}
}
/* Publish Page */
.publish-page {
padding: 1rem;
}
.publish-page h1 {
margin-bottom: 2rem;
}
.connect-wallet {
text-align: center;
padding: 2rem;
background: light-dark(var(--surface-light), var(--surface-dark));
border-radius: var(--border-radius);
margin-bottom: 2rem;
}
.publish-form {
background: light-dark(var(--surface-light), var(--surface-dark));
padding: 2rem;
border-radius: var(--border-radius);
margin-bottom: 2rem;
}
.package-list {
list-style: none;
padding: 0;
margin: 0;
display: grid;
gap: 1rem;
}
.package-list li {
display: flex;
align-items: center;
justify-content: space-between;
padding: 1rem;
background: light-dark(var(--surface-light), var(--surface-dark));
border-radius: var(--border-radius);
}
.package-list .app-name {
display: flex;
align-items: center;
gap: 1rem;
color: inherit;
text-decoration: none;
}
.package-list .app-name:hover {
color: var(--orange);
}
.package-icon {
width: 32px;
height: 32px;
border-radius: var(--border-radius);
}
.no-packages {
text-align: center;
padding: 2rem;
background: light-dark(var(--surface-light), var(--surface-dark));
border-radius: var(--border-radius);
color: var(--gray);
}
/* Update badge */
.update-badge {
background: var(--red);
color: var(--text-light);
border-radius: 50%;
padding: 0.15rem 0.4rem;
font-size: 0.75rem;
position: absolute;
top: -5px;
right: -5px;
min-width: 18px;
height: 18px;
display: flex;
align-items: center;
justify-content: center;
font-weight: 600;
}
/* Updates section */
.updates-section {
margin-bottom: 2rem;
}
.section-title {
color: var(--orange);
font-size: 1.25rem;
margin-bottom: 1rem;
}
.updates-list {
display: flex;
flex-direction: column;
gap: 0.5rem;
}
.update-item {
background-color: light-dark(var(--surface-light), var(--surface-dark));
border-radius: var(--border-radius);
overflow: hidden;
border: 1px solid transparent;
}
.update-header {
display: flex;
justify-content: space-between;
align-items: center;
padding: 0.75rem 1rem;
cursor: pointer;
transition: background-color 0.2s;
}
.update-header:hover {
background-color: rgba(255, 255, 255, 0.05);
}
.update-title {
display: flex;
align-items: center;
gap: 0.75rem;
font-weight: 500;
}
.update-actions {
display: flex;
gap: 0.5rem;
}
.update-actions .action-button {
background: none;
border: none;
cursor: pointer;
color: var(--gray);
transition: color 0.2s;
display: flex;
align-items: center;
}
.update-actions .action-button.retry:hover {
color: var(--blue);
}
.update-actions .action-button.clear:hover {
color: var(--red);
}
.update-details {
padding: 0.75rem 1rem 1rem 2.25rem;
border-top: 1px solid rgba(255, 255, 255, 0.1);
}
.version-info {
color: var(--gray);
font-size: 0.9rem;
margin-bottom: 0.5rem;
}
.manifest-info {
color: var(--orange);
font-size: 0.9rem;
margin-bottom: 0.5rem;
}
.error-list {
display: flex;
flex-direction: column;
gap: 0.5rem;
}
.error-item {
display: flex;
align-items: center;
gap: 0.5rem;
color: var(--red);
font-size: 0.9rem;
}
.error-icon {
flex-shrink: 0;
}
/* App Page Layout */
.app-page {
max-width: 80rem;
margin: 0 auto;
padding: 2rem 1rem;
}
/* Updates Section */
.updates-section {
  /* was `margin-bottom: 8;` — unitless lengths are invalid CSS and the
     declaration was silently dropped; 2rem matches the earlier
     .updates-section rule in this file */
  margin-bottom: 2rem;
}
.update-item {
border: 1px solid transparent;
border-radius: 0.5rem;
padding: 1rem;
margin-bottom: 1rem;
background-color: light-dark(var(--surface-light), var(--surface-dark));
border: 1px solid light-dark(var(--gray), var(--surface-dark));
}
.update-header {
display: flex;
align-items: center;
justify-content: space-between;
cursor: pointer;
}
.update-summary {
display: flex;
align-items: center;
gap: 0.5rem;
}
.update-details {
margin-top: 1rem;
color: light-dark(var(--text-secondary), var(--text));
}
.retry-button {
display: flex;
align-items: center;
gap: 0.5rem;
padding: 0.75rem 1rem;
border-radius: 0.375rem;
font-size: 1rem;
background-color: light-dark(var(--surface-light), var(--surface-dark));
color: var(--orange);
border: 1px solid var(--orange);
transition: background-color 0.2s, color 0.2s;
}
.error-count {
padding: 0.25rem 0.5rem;
border-radius: 0.25rem;
font-size: 0.75rem;
background-color: light-dark(var(--red-100), var(--red-900));
color: light-dark(var(--red-700), var(--red-200));
}
/* Navigation */
.navigation {
  display: flex;
  align-items: center;
  /* was `gap: 4;` — unitless, invalid CSS, silently ignored */
  gap: 1rem;
  margin-bottom: 1.5rem;
}
.nav-button {
  display: flex;
  align-items: center;
  /* was `gap: 2;` — unitless, invalid CSS, silently ignored */
  gap: 0.5rem;
  padding: 0.75rem 1rem;
  border-radius: 0.375rem;
  font-size: 1rem;
  background-color: light-dark(var(--surface-light), var(--surface-dark));
  color: var(--orange);
  border: 1px solid var(--orange);
  transition: background-color 0.2s, color 0.2s;
}
.current-path {
font-size: 1rem;
color: light-dark(var(--text-secondary), var(--text));
}
.file-explorer {
border: 1px solid light-dark(var(--gray), var(--surface-dark));
padding: 1rem;
border-radius: var(--border-radius);
background: light-dark(var(--surface-light), var(--surface-dark));
}
.file-explorer h3 {
padding: 0.75rem 1rem;
font-size: 1.125rem;
font-weight: 500;
background-color: light-dark(var(--surface-light), var(--surface-dark));
border-bottom: 1px solid light-dark(var(--gray), var(--surface-dark));
}
.downloads-table {
width: 100%;
border-radius: var(--border-radius);
overflow: hidden;
}
.downloads-table th {
padding: 0.75rem 1rem;
font-size: 1rem;
font-weight: 500;
text-align: left;
color: light-dark(var(--text-secondary), var(--text));
background-color: light-dark(var(--surface-light), var(--surface-dark));
border-bottom: 1px solid light-dark(var(--gray), var(--surface-dark));
}
.downloads-table td {
padding: 0.75rem 1rem;
font-size: 1rem;
border-bottom: 1px solid light-dark(var(--gray), var(--surface-dark));
}
.downloads-table tr.file:hover,
.downloads-table tr.directory:hover {
background-color: light-dark(var(--surface-light), var(--surface-dark));
cursor: pointer;
}
.updates-section {
background: light-dark(var(--surface-light), var(--surface-dark));
padding: 1rem;
margin-bottom: 1rem;
border-radius: var(--border-radius);
}
.tooltip-container {
position: relative;
display: inline-flex;
align-items: center;
gap: 4px;
}
.tooltip-icon {
cursor: help;
color: #666;
font-size: 14px;
position: relative;
}
.tooltip-content {
position: absolute;
left: 24px;
top: -4px;
background: #333;
color: white;
padding: 8px 12px;
border-radius: 4px;
font-size: 14px;
white-space: nowrap;
z-index: 1000;
opacity: 0;
visibility: hidden;
transition: opacity 0.3s ease, visibility 0.3s ease;
min-width: max-content;
}
/* Create an invisible bridge between icon and content */
.tooltip-content::after {
content: '';
position: absolute;
left: -20px; /* Cover the gap between icon and content */
top: 0;
width: 20px;
height: 100%;
background: transparent;
}
.tooltip-container:hover .tooltip-content {
opacity: 1;
visibility: visible;
transition-delay: 0.2s;
}
.tooltip-content:hover {
opacity: 1 !important;
visibility: visible !important;
}
.tooltip-content::before {
content: '';
position: absolute;
left: -4px;
top: 8px;
border-top: 4px solid transparent;
border-bottom: 4px solid transparent;
border-right: 4px solid #333;
}
.tooltip-content a {
color: #fff;
text-decoration: underline;
}
.tooltip-content a:hover {
text-decoration: none;
}
.wallet-status {
display: flex;
align-items: center;
gap: 4px;
}
.page-header {
display: flex;
justify-content: space-between;
align-items: center;
margin-bottom: 2rem;
}
.header-actions {
display: flex;
gap: 1rem;
align-items: center;
}
/* Modal styles */
.modal-overlay {
position: fixed;
inset: 0;
background-color: rgba(0, 0, 0, 0.5);
display: flex;
align-items: center;
justify-content: center;
z-index: 50;
}
.modal-container {
padding: 1rem;
width: 100%;
max-width: 28rem;
}
.modal-content {
background-color: light-dark(var(--surface-light), var(--surface-dark));
padding: 1.5rem;
border-radius: var(--border-radius);
}
.modal-header {
display: flex;
align-items: center;
gap: 0.75rem;
margin-bottom: 1rem;
}
.modal-title {
font-size: 1.25rem;
font-weight: 500;
margin: 0;
color: inherit;
}
.modal-description {
color: light-dark(var(--text-light), var(--text-dark));
margin-bottom: 1.5rem;
}
.modal-footer {
display: flex;
justify-content: flex-end;
gap: 0.75rem;
}
/* Button variants */
.danger-button {
background-color: var(--red) !important;
color: white !important;
}
.danger-button:hover {
opacity: 0.9;
}
.danger-button:disabled {
opacity: 0.5;
cursor: not-allowed;
}

View File

@ -148,6 +148,12 @@ export default function AppPage() {
{latestVersion && (
<li><span>Latest Version:</span> <span>{latestVersion}</span></li>
)}
{installedApp?.pending_update_hash && (
<li className="warning">
<span>Failed Auto-Update:</span>
<span>Update to version with hash {installedApp.pending_update_hash.slice(0, 8)}... failed, approve newly requested capabilities and install it here:</span>
</li>
)}
<li><span>Publisher:</span> <span>{app.package_id.publisher_node}</span></li>
<li><span>License:</span> <span>{app.metadata?.properties?.license || "Not specified"}</span></li>
<li>

View File

@ -1,7 +1,9 @@
import React, { useState, useEffect } from "react";
import { FaFolder, FaFile, FaChevronLeft, FaSync, FaRocket, FaSpinner, FaCheck, FaTrash } from "react-icons/fa";
import { FaFolder, FaFile, FaChevronLeft, FaSync, FaRocket, FaSpinner, FaCheck, FaTrash, FaExclamationTriangle, FaTimesCircle, FaChevronDown, FaChevronRight } from "react-icons/fa";
import { useNavigate } from "react-router-dom";
import useAppsStore from "../store";
import { DownloadItem, PackageManifest, PackageState } from "../types/Apps";
import { ResetButton} from "../components";
import { DownloadItem, PackageManifestEntry, PackageState, Updates, DownloadError, UpdateInfo } from "../types/Apps";
// Core packages that cannot be uninstalled
const CORE_PACKAGES = [
@ -16,6 +18,7 @@ const CORE_PACKAGES = [
];
export default function MyAppsPage() {
const navigate = useNavigate();
const {
fetchDownloads,
fetchDownloadsForApp,
@ -25,16 +28,20 @@ export default function MyAppsPage() {
removeDownload,
fetchInstalled,
installed,
uninstallApp
uninstallApp,
fetchUpdates,
clearUpdates,
updates
} = useAppsStore();
const [currentPath, setCurrentPath] = useState<string[]>([]);
const [items, setItems] = useState<DownloadItem[]>([]);
const [expandedUpdates, setExpandedUpdates] = useState<Set<string>>(new Set());
const [isInstalling, setIsInstalling] = useState(false);
const [isUninstalling, setIsUninstalling] = useState(false);
const [error, setError] = useState<string | null>(null);
const [showCapApproval, setShowCapApproval] = useState(false);
const [manifest, setManifest] = useState<PackageManifest | null>(null);
const [manifest, setManifest] = useState<PackageManifestEntry | null>(null);
const [selectedItem, setSelectedItem] = useState<DownloadItem | null>(null);
const [showUninstallConfirm, setShowUninstallConfirm] = useState(false);
const [appToUninstall, setAppToUninstall] = useState<any>(null);
@ -42,6 +49,7 @@ export default function MyAppsPage() {
useEffect(() => {
loadItems();
fetchInstalled();
fetchUpdates();
}, [currentPath]);
const loadItems = async () => {
@ -59,34 +67,132 @@ export default function MyAppsPage() {
}
};
const initiateUninstall = (app: any) => {
const packageId = `${app.package_id.package_name}:${app.package_id.publisher_node}`;
if (CORE_PACKAGES.includes(packageId)) {
setError("Cannot uninstall core system packages");
return;
}
setAppToUninstall(app);
setShowUninstallConfirm(true);
// Clears the stored auto-update failure info for one package, then
// re-fetches so the UI reflects the cleared state immediately.
const handleClearUpdates = async (packageId: string) => {
await clearUpdates(packageId);
fetchUpdates(); // Refresh updates after clearing
};
const handleUninstall = async () => {
if (!appToUninstall) return;
setIsUninstalling(true);
const packageId = `${appToUninstall.package_id.package_name}:${appToUninstall.package_id.publisher_node}`;
try {
await uninstallApp(packageId);
await fetchInstalled();
await loadItems();
setShowUninstallConfirm(false);
setAppToUninstall(null);
} catch (error) {
console.error('Uninstallation failed:', error);
setError(`Uninstallation failed: ${error instanceof Error ? error.message : String(error)}`);
} finally {
setIsUninstalling(false);
// Flips the expanded/collapsed state of one package's update entry.
// A fresh Set is created so React sees a new state reference.
const toggleUpdateExpansion = (packageId: string) => {
    setExpandedUpdates(prev => {
        const next = new Set(prev);
        // Set.delete returns false when the id wasn't present,
        // in which case we add it instead — a pure toggle.
        if (!next.delete(packageId)) {
            next.add(packageId);
        }
        return next;
    });
};
// Produces a human-readable description of a DownloadError, which is
// either a plain string or a single-key tagged-union object.
const formatError = (error: DownloadError): string => {
    if (typeof error === 'string') {
        return error;
    }
    if ('HashMismatch' in error) {
        const { desired, actual } = error.HashMismatch;
        return `Hash mismatch (expected ${desired.slice(0, 8)}, got ${actual.slice(0, 8)})`;
    }
    if ('HandlingError' in error) {
        return error.HandlingError;
    }
    if ('Timeout' in error) {
        return 'Connection timed out';
    }
    return 'Unknown error';
};
// Renders the "Failed Auto Updates" panel: one expandable row per
// package, each listing per-version-hash errors and pending-manifest
// info, with Retry (navigate to download page) and Clear actions.
//
// Fix: the original guarded on emptiness up front AND wrapped the list
// in `Object.keys(updates).length > 0 ? … : <empty-state>` — the else
// branch was unreachable dead code. The key list is now computed once.
const renderUpdates = () => {
    const packageIds = Object.keys(updates || {});

    // Nothing failed: show the all-clear placeholder.
    if (packageIds.length === 0) {
        return (
            <div className="updates-section">
                <h2>Failed Auto Updates (0)</h2>
                <p>None found, all clear!</p>
            </div>
        );
    }

    return (
        <div className="updates-section">
            <h2 className="section-title">Failed Auto Updates ({packageIds.length})</h2>
            <div className="updates-list">
                {Object.entries(updates).map(([packageId, versionMap]) => {
                    // Total error count across all failed version hashes.
                    const totalErrors = Object.values(versionMap).reduce((sum, info) =>
                        sum + (info.errors?.length || 0), 0);
                    const hasManifestChanges = Object.values(versionMap).some(info =>
                        info.pending_manifest_hash);
                    return (
                        <div key={packageId} className="update-item error">
                            <div className="update-header" onClick={() => toggleUpdateExpansion(packageId)}>
                                <div className="update-title">
                                    {expandedUpdates.has(packageId) ? <FaChevronDown /> : <FaChevronRight />}
                                    <FaExclamationTriangle className="error-badge" />
                                    <span>{packageId}</span>
                                    <div className="update-summary">
                                        {totalErrors > 0 && (
                                            <span className="error-count">{totalErrors} error{totalErrors !== 1 ? 's' : ''}</span>
                                        )}
                                        {hasManifestChanges && (
                                            <span className="manifest-badge">Manifest changes pending</span>
                                        )}
                                    </div>
                                </div>
                                <div className="update-actions">
                                    {/* stopPropagation keeps button clicks from toggling the row */}
                                    <button
                                        className="action-button retry"
                                        onClick={(e) => {
                                            e.stopPropagation();
                                            navigate(`/download/${packageId}`);
                                        }}
                                        title="Retry download"
                                    >
                                        <FaSync />
                                        <span>Retry</span>
                                    </button>
                                    <button
                                        className="action-button clear"
                                        onClick={(e) => {
                                            e.stopPropagation();
                                            handleClearUpdates(packageId);
                                        }}
                                        title="Clear update info"
                                    >
                                        <FaTimesCircle />
                                    </button>
                                </div>
                            </div>
                            {/* Expanded view: per-version-hash details. */}
                            {expandedUpdates.has(packageId) && Object.entries(versionMap).map(([versionHash, info]) => (
                                <div key={versionHash} className="update-details">
                                    <div className="version-info">
                                        Version: {versionHash.slice(0, 8)}...
                                    </div>
                                    {info.pending_manifest_hash && (
                                        <div className="manifest-info">
                                            <FaExclamationTriangle />
                                            Pending manifest: {info.pending_manifest_hash.slice(0, 8)}...
                                        </div>
                                    )}
                                    {info.errors && info.errors.length > 0 && (
                                        <div className="error-list">
                                            {info.errors.map(([source, error], idx) => (
                                                <div key={idx} className="error-item">
                                                    <FaExclamationTriangle className="error-icon" />
                                                    <span>{source}: {formatError(error)}</span>
                                                </div>
                                            ))}
                                        </div>
                                    )}
                                </div>
                            ))}
                        </div>
                    );
                })}
            </div>
        </div>
    );
};
const navigateToItem = (item: DownloadItem) => {
if (item.Dir) {
@ -173,11 +279,57 @@ export default function MyAppsPage() {
return Object.values(installed).some(app => app.package_id.package_name === packageName);
};
return (
<div className="downloads-page">
<h2>My Apps</h2>
const initiateUninstall = (app: any) => {
const packageId = `${app.package_id.package_name}:${app.package_id.publisher_node}`;
if (CORE_PACKAGES.includes(packageId)) {
setError("Cannot uninstall core system packages");
return;
}
setAppToUninstall(app);
setShowUninstallConfirm(true);
};
{/* Installed Apps Section */}
const handleUninstall = async () => {
if (!appToUninstall) return;
setIsUninstalling(true);
const packageId = `${appToUninstall.package_id.package_name}:${appToUninstall.package_id.publisher_node}`;
try {
await uninstallApp(packageId);
await fetchInstalled();
await loadItems();
setShowUninstallConfirm(false);
setAppToUninstall(null);
} catch (error) {
console.error('Uninstallation failed:', error);
setError(`Uninstallation failed: ${error instanceof Error ? error.message : String(error)}`);
} finally {
setIsUninstalling(false);
}
};
return (
<div className="my-apps-page">
<div style={{ display: 'flex', justifyContent: 'space-between', alignItems: 'center', marginBottom: '2rem' }}>
<h1>My Apps</h1>
<ResetButton />
</div>
{error && <div className="error-message">{error}</div>}
{renderUpdates()}
{/* Navigation */}
<div className="navigation">
{currentPath.length > 0 && (
<button onClick={() => setCurrentPath([])} className="nav-button">
<FaChevronLeft /> Back
</button>
)}
<div className="current-path">
{currentPath.length === 0 ? 'Downloads' : currentPath.join('/')}
</div>
</div>
{/* Items Table */}
<div className="items-table-container">
<div className="file-explorer">
<h3>Installed Apps</h3>
<table className="downloads-table">
@ -214,7 +366,6 @@ export default function MyAppsPage() {
</table>
</div>
{/* Downloads Section */}
<div className="file-explorer">
<h3>Downloads</h3>
<div className="path-navigation">
@ -274,12 +425,7 @@ export default function MyAppsPage() {
</tbody>
</table>
</div>
{error && (
<div className="error-message">
{error}
</div>
)}
{/* Uninstall Confirmation Modal */}
{showUninstallConfirm && appToUninstall && (
@ -318,8 +464,6 @@ export default function MyAppsPage() {
</div>
)}
{showCapApproval && manifest && (
<div className="cap-approval-popup">
<div className="cap-approval-content">

View File

@ -7,12 +7,13 @@ import { mechAbi, KIMAP, encodeIntoMintCall, encodeMulticalls, kimapAbi, MULTICA
import { kinohash } from '../utils/kinohash';
import useAppsStore from "../store";
import { PackageSelector } from "../components";
import { Tooltip } from '../components/Tooltip';
const NAME_INVALID = "Package name must contain only valid characters (a-z, 0-9, -, and .)";
export default function PublishPage() {
const { openConnectModal } = useConnectModal();
const { ourApps, fetchOurApps, downloads } = useAppsStore();
const { ourApps, fetchOurApps, downloads, fetchDownloadsForApp } = useAppsStore();
const publicClient = usePublicClient();
const { address, isConnected, isConnecting } = useAccount();
@ -23,6 +24,7 @@ export default function PublishPage() {
});
const [packageName, setPackageName] = useState<string>("");
// @ts-ignore
const [publisherId, setPublisherId] = useState<string>(window.our?.node || "");
const [metadataUrl, setMetadataUrl] = useState<string>("");
const [metadataHash, setMetadataHash] = useState<string>("");
@ -34,6 +36,26 @@ export default function PublishPage() {
fetchOurApps();
}, [fetchOurApps]);
useEffect(() => {
if (packageName && publisherId) {
const id = `${packageName}:${publisherId}`;
fetchDownloadsForApp(id);
}
}, [packageName, publisherId, fetchDownloadsForApp]);
useEffect(() => {
if (isConfirmed) {
// Fetch our apps again after successful publish
fetchOurApps();
// Reset form fields
setPackageName("");
// @ts-ignore
setPublisherId(window.our?.node || "");
setMetadataUrl("");
setMetadataHash("");
}
}, [isConfirmed, fetchOurApps]);
const validatePackageName = useCallback((name: string) => {
// Allow lowercase letters, numbers, hyphens, and dots
const validNameRegex = /^[a-z0-9.-]+$/;
@ -69,9 +91,12 @@ export default function PublishPage() {
// Check if code_hashes exist in metadata and is an object
if (metadata.properties && metadata.properties.code_hashes && typeof metadata.properties.code_hashes === 'object') {
const codeHashes = metadata.properties.code_hashes;
const missingHashes = Object.entries(codeHashes).filter(([version, hash]) =>
!downloads[`${packageName}:${publisherId}`]?.some(d => d.File?.name === `${hash}.zip`)
);
console.log('Available downloads:', downloads[`${packageName}:${publisherId}`]);
const missingHashes = Object.entries(codeHashes).filter(([version, hash]) => {
const hasDownload = downloads[`${packageName}:${publisherId}`]?.some(d => d.File?.name === `${hash}.zip`);
return !hasDownload;
});
if (missingHashes.length > 0) {
setMetadataError(`Missing local downloads for mirroring versions: ${missingHashes.map(([version]) => version).join(', ')}`);
@ -163,12 +188,6 @@ export default function PublishPage() {
gas: BigInt(1000000),
});
// Reset form fields
setPackageName("");
setPublisherId(window.our?.node || "");
setMetadataUrl("");
setMetadataHash("");
} catch (error) {
console.error(error);
}
@ -223,22 +242,31 @@ export default function PublishPage() {
return (
<div className="publish-page">
<h1>Publish Package</h1>
{Boolean(address) && (
<div className="publisher-info">
<span>Publishing as:</span>
<span className="address">{address?.slice(0, 4)}...{address?.slice(-4)}</span>
{!address ? (
<div className="wallet-status">
<button onClick={() => openConnectModal?.()}>Connect Wallet</button>
</div>
) : (
<div className="wallet-status">
Connected: {address.slice(0, 6)}...{address.slice(-4)}
<Tooltip content="Make sure the wallet you're connecting to publish is the same as the owner for the publisher!" />
</div>
)}
{isConfirming ? (
<div className="message info">Publishing package...</div>
<div className="message info">
<div className="loading-spinner"></div>
<span>Publishing package...</span>
</div>
) : !address || !isConnected ? (
<>
<div className="connect-wallet">
<h4>Please connect your wallet to publish a package</h4>
<ConnectButton />
</>
</div>
) : isConnecting ? (
<div className="message info">Approve connection in your wallet</div>
<div className="message info">
<div className="loading-spinner"></div>
<span>Approve connection in your wallet</span>
</div>
) : (
<form className="publish-form" onSubmit={publishPackage}>
<div className="form-group">
@ -248,33 +276,36 @@ export default function PublishPage() {
</div>
<div className="form-group">
<label htmlFor="metadata-url">Metadata URL</label>
<div style={{ display: 'flex', alignItems: 'center', gap: '4px' }}>
<label>Metadata URL</label>
<Tooltip content={<>add a link to metadata.json here (<a href="https://raw.githubusercontent.com/kinode-dao/kit/47cdf82f70b36f2a102ddfaaeed5efa10d7ef5b9/src/new/templates/rust/ui/chat/metadata.json" target="_blank" rel="noopener noreferrer">example link</a>)</>} />
</div>
<input
id="metadata-url"
type="text"
required
value={metadataUrl}
onChange={(e) => setMetadataUrl(e.target.value)}
onBlur={calculateMetadataHash}
placeholder="https://github/my-org/my-repo/metadata.json"
/>
<p className="help-text">
Metadata is a JSON file that describes your package.
</p>
{metadataError && <p className="error-message">{metadataError}</p>}
</div>
<div className="form-group">
<label htmlFor="metadata-hash">Metadata Hash</label>
<label>Metadata Hash</label>
<input
readOnly
id="metadata-hash"
type="text"
value={metadataHash}
placeholder="Calculated automatically from metadata URL"
/>
</div>
<button type="submit" disabled={isConfirming || nameValidity !== null}>
{isConfirming ? 'Publishing...' : 'Publish'}
<button type="submit" disabled={isConfirming || nameValidity !== null || Boolean(metadataError)}>
{isConfirming ? (
<>
<div className="loading-spinner small"></div>
<span>Publishing...</span>
</>
) : (
'Publish'
)}
</button>
</form>
)}
@ -293,21 +324,24 @@ export default function PublishPage() {
<div className="my-packages">
<h2>Packages You Own</h2>
{Object.keys(ourApps).length > 0 ? (
<ul>
<ul className="package-list">
{Object.values(ourApps).map((app) => (
<li key={`${app.package_id.package_name}:${app.package_id.publisher_node}`}>
<Link to={`/app/${app.package_id.package_name}:${app.package_id.publisher_node}`} className="app-name">
{app.metadata?.name || app.package_id.package_name}
{app.metadata?.image && (
<img src={app.metadata.image} alt="" className="package-icon" />
)}
<span>{app.metadata?.name || app.package_id.package_name}</span>
</Link>
<button onClick={() => unpublishPackage(app.package_id.package_name, app.package_id.publisher_node)}>
<button onClick={() => unpublishPackage(app.package_id.package_name, app.package_id.publisher_node)} className="danger">
Unpublish
</button>
</li>
))}
</ul>
) : (
<p>No packages published</p>
<p className="no-packages">No packages published</p>
)}
</div>
</div>

View File

@ -2,13 +2,15 @@ import React, { useState, useEffect } from "react";
import useAppsStore from "../store";
import { AppListing } from "../types/Apps";
import { Link } from "react-router-dom";
import { FaSearch } from "react-icons/fa";
export default function StorePage() {
const { listings, fetchListings } = useAppsStore();
const { listings, fetchListings, fetchUpdates } = useAppsStore();
const [searchQuery, setSearchQuery] = useState<string>("");
useEffect(() => {
fetchListings();
fetchUpdates();
}, [fetchListings]);
// extensive temp null handling due to weird prod bug
@ -25,12 +27,15 @@ export default function StorePage() {
return (
<div className="store-page">
<div className="store-header">
<div className="search-bar">
<input
type="text"
placeholder="Search apps..."
value={searchQuery}
onChange={(e) => setSearchQuery(e.target.value)}
/>
<FaSearch />
</div>
</div>
<div className="app-list">
{!listings ? (

View File

@ -1,6 +1,6 @@
import { create } from 'zustand'
import { persist } from 'zustand/middleware'
import { PackageState, AppListing, MirrorCheckFile, DownloadItem, HomepageApp, ManifestResponse, Notification } from '../types/Apps'
import { PackageState, AppListing, MirrorCheckFile, DownloadItem, HomepageApp, ManifestResponse, Notification, UpdateInfo } from '../types/Apps'
import { HTTP_STATUS } from '../constants/http'
import KinodeClientApi from "@kinode/client-api"
import { WEBSOCKET_URL } from '../utils/ws'
@ -16,6 +16,7 @@ interface AppsStore {
notifications: Notification[]
homepageApps: HomepageApp[]
activeDownloads: Record<string, { downloaded: number, total: number }>
updates: Record<string, UpdateInfo>
fetchData: (id: string) => Promise<void>
fetchListings: () => Promise<void>
@ -26,6 +27,7 @@ interface AppsStore {
fetchOurApps: () => Promise<void>
fetchDownloadsForApp: (id: string) => Promise<DownloadItem[]>
checkMirror: (node: string) => Promise<MirrorCheckFile | null>
resetStore: () => Promise<void>
fetchHomepageApps: () => Promise<void>
getLaunchUrl: (id: string) => string | null
@ -48,6 +50,8 @@ interface AppsStore {
clearActiveDownload: (appId: string) => void
clearAllActiveDownloads: () => void;
fetchUpdates: () => Promise<void>
clearUpdates: (packageId: string) => Promise<void>
}
const useAppsStore = create<AppsStore>()((set, get) => ({
@ -58,7 +62,7 @@ const useAppsStore = create<AppsStore>()((set, get) => ({
activeDownloads: {},
homepageApps: [],
notifications: [],
updates: {},
fetchData: async (id: string) => {
if (!id) return;
@ -380,6 +384,55 @@ const useAppsStore = create<AppsStore>()((set, get) => ({
});
},
fetchUpdates: async () => {
try {
const res = await fetch(`${BASE_URL}/updates`);
if (res.status === HTTP_STATUS.OK) {
const updates = await res.json();
set({ updates });
}
} catch (error) {
console.error("Error fetching updates:", error);
}
},
clearUpdates: async (packageId: string) => {
try {
await fetch(`${BASE_URL}/updates/${packageId}/clear`, {
method: 'POST',
});
set((state) => {
const newUpdates = { ...state.updates };
delete newUpdates[packageId];
return { updates: newUpdates };
});
} catch (error) {
console.error("Error clearing updates:", error);
}
},
resetStore: async () => {
try {
const response = await fetch(`${BASE_URL}/reset`, {
method: 'POST',
});
if (!response.ok) {
throw new Error('Reset failed');
}
// Refresh the store data
await Promise.all([
get().fetchInstalled(),
get().fetchListings(),
get().fetchUpdates(),
]);
} catch (error) {
console.error('Reset failed:', error);
throw error;
}
},
ws: new KinodeClientApi({
uri: WEBSOCKET_URL,
nodeId: (window as any).our?.node,
@ -419,10 +472,26 @@ const useAppsStore = create<AppsStore>()((set, get) => ({
get().removeNotification(`download-${appId}`);
if (error) {
const formatDownloadError = (error: any): string => {
if (typeof error === 'object' && error !== null) {
if ('HashMismatch' in error) {
const { actual, desired } = error.HashMismatch;
return `Hash mismatch: expected ${desired.slice(0, 8)}..., got ${actual.slice(0, 8)}...`;
}
// Try to serialize the error object if it's not a HashMismatch
try {
return JSON.stringify(error);
} catch {
return String(error);
}
}
return String(error);
};
get().addNotification({
id: `error-${appId}`,
type: 'error',
message: `Download failed for ${package_id.package_name}: ${error}`,
message: `Download failed for ${package_id.package_name}: ${formatDownloadError(error)}`,
timestamp: Date.now(),
});
} else {

View File

@ -94,6 +94,35 @@ export interface HomepageApp {
favorite: boolean;
}
export interface HashMismatch {
desired: string;
actual: string;
}
export type DownloadError =
| "NoPackage"
| "NotMirroring"
| { HashMismatch: HashMismatch }
| "FileNotFound"
| "WorkerSpawnFailed"
| "HttpClientError"
| "BlobNotFound"
| "VfsError"
| { HandlingError: string }
| "Timeout"
| "InvalidManifest"
| "Offline";
export interface UpdateInfo {
errors: [string, DownloadError][]; // [url/node, error]
pending_manifest_hash: string | null;
}
export type Updates = {
[key: string]: { // package_id
[key: string]: UpdateInfo; // version_hash -> update info
};
};
export type NotificationActionType = 'click' | 'modal' | 'popup' | 'redirect';

View File

@ -8,7 +8,7 @@ simulation-mode = []
[dependencies]
anyhow = "1.0"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
process_macros = "0.1"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"

View File

@ -1813,7 +1813,7 @@ dependencies = [
[[package]]
name = "kinode_process_lib"
version = "0.10.0"
source = "git+https://github.com/kinode-dao/process_lib?rev=0443ece#0443ece2a5dfdbdc1b40db454a1535e1b1c1a1b3"
source = "git+https://github.com/kinode-dao/process_lib?rev=d97e012#d97e012842dd4cc0e036d5de5048064e770302ab"
dependencies = [
"alloy",
"alloy-primitives",

View File

@ -1,7 +1,6 @@
interface chess {
/// Our "chess protocol" request/response format. We'll always serialize these
/// to a byte vector and send them over IPC.
variant request {
/// lazy-load-blob: none.
new-game(new-game-request),

View File

@ -9,7 +9,7 @@ simulation-mode = []
[dependencies]
anyhow = "1.0"
bincode = "1.3.3"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
pleco = "0.5"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"

View File

@ -1774,7 +1774,7 @@ dependencies = [
[[package]]
name = "kinode_process_lib"
version = "0.10.0"
source = "git+https://github.com/kinode-dao/process_lib?rev=0443ece#0443ece2a5dfdbdc1b40db454a1535e1b1c1a1b3"
source = "git+https://github.com/kinode-dao/process_lib?rev=d97e012#d97e012842dd4cc0e036d5de5048064e770302ab"
dependencies = [
"alloy",
"alloy-primitives",

View File

@ -7,24 +7,29 @@ interface contacts {
}
variant request {
/// requires ReadNameOnly capability
/// lazy-load-blob: none.
get-names, // requires read-names-only
get-names,
/// requires Read capability
/// lazy-load-blob: none.
get-all-contacts, // requires read
get-all-contacts,
/// requires Read capability
/// lazy-load-blob: none.
get-contact(string), // requires read
get-contact(string),
/// requires Add capability
/// lazy-load-blob: none.
add-contact(string),
/// requires Add capability
/// lazy-load-blob: none.
add-contact(string), // requires add
/// tuple<node, field, value>
///
add-field(tuple<string, string, string>),
/// requires Remove capability
/// lazy-load-blob: none.
add-field(tuple<string, string, string>), // requires add
remove-contact(string),
/// requires Remove capability
/// lazy-load-blob: none.
remove-contact(string), // requires remove
/// tuple<node, field>
///
/// lazy-load-blob: none.
remove-field(tuple<string, string>), // requires remove
remove-field(tuple<string, string>),
}
variant response {
@ -43,7 +48,6 @@ interface contacts {
/// lazy-load-blob: none.
remove-field,
/// any failed request will receive this response
///
/// lazy-load-blob: none.
err(string),
}

View File

@ -7,7 +7,7 @@ edition = "2021"
simulation-mode = []
[dependencies]
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
process_macros = "0.1"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"

View File

@ -6,7 +6,7 @@ publish = false
[dependencies]
anyhow = "1.0"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
process_macros = "0.1"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"

View File

@ -1763,7 +1763,7 @@ dependencies = [
[[package]]
name = "kinode_process_lib"
version = "0.10.0"
source = "git+https://github.com/kinode-dao/process_lib?rev=0443ece#0443ece2a5dfdbdc1b40db454a1535e1b1c1a1b3"
source = "git+https://github.com/kinode-dao/process_lib?rev=d97e012#d97e012842dd4cc0e036d5de5048064e770302ab"
dependencies = [
"alloy",
"alloy-primitives",

View File

@ -9,7 +9,7 @@ simulation-mode = []
[dependencies]
anyhow = "1.0"
bincode = "1.3.3"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
wit-bindgen = "0.36.0"

View File

@ -1,6 +1,6 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
version = 4
[[package]]
name = "addr2line"
@ -1761,7 +1761,7 @@ dependencies = [
[[package]]
name = "kinode_process_lib"
version = "0.10.0"
source = "git+https://github.com/kinode-dao/process_lib?rev=0443ece#0443ece2a5dfdbdc1b40db454a1535e1b1c1a1b3"
source = "git+https://github.com/kinode-dao/process_lib?rev=d97e012#d97e012842dd4cc0e036d5de5048064e770302ab"
dependencies = [
"alloy",
"alloy-primitives",
@ -1917,6 +1917,17 @@ dependencies = [
"tempfile",
]
[[package]]
name = "node_info"
version = "0.1.0"
dependencies = [
"kinode_process_lib",
"process_macros",
"serde",
"serde_json",
"wit-bindgen",
]
[[package]]
name = "num-bigint"
version = "0.4.6"
@ -2383,6 +2394,17 @@ dependencies = [
"windows-registry",
]
[[package]]
name = "reset"
version = "0.1.0"
dependencies = [
"kinode_process_lib",
"process_macros",
"serde",
"serde_json",
"wit-bindgen",
]
[[package]]
name = "rfc6979"
version = "0.4.0"
@ -2776,17 +2798,6 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
[[package]]
name = "state"
version = "0.1.0"
dependencies = [
"kinode_process_lib",
"process_macros",
"serde",
"serde_json",
"wit-bindgen",
]
[[package]]
name = "static_assertions"
version = "1.1.0"

View File

@ -3,8 +3,9 @@ resolver = "2"
members = [
"get-block",
"kns-indexer",
"state",
]
"reset",
"node-info",
]
[profile.release]
panic = "abort"

View File

@ -14,15 +14,15 @@ interface kns-indexer {
/// returns an Option<KnsUpdate>
/// set block to 0 if you just want to get the current state of the indexer
node-info(node-info-request),
/// return the entire state of the indexer at the given block
/// set block to 0 if you just want to get the current state of the indexer
get-state(get-state-request),
/// resets and re-indexes the chain, requires root cap,
    /// returns a response variant reset
reset,
}
variant indexer-response {
name(option<string>),
node-info(option<wit-kns-update>),
get-state(wit-state),
reset(reset-result),
}
record namehash-to-name-request {
@ -35,10 +35,6 @@ interface kns-indexer {
block: u64,
}
record get-state-request {
block: u64,
}
record wit-kns-update {
name: string,
public-key: string,
@ -47,12 +43,14 @@ interface kns-indexer {
routers: list<string>,
}
record wit-state {
chain-id: u64,
contract-address: list<u8>, // 20-byte ETH address
names: list<tuple<string, string>>, // map, but wit doesn't support maps
nodes: list<tuple<string, wit-kns-update>>, // map, but wit doesn't support maps
last-block: u64,
variant reset-result {
success,
err(reset-error),
}
variant reset-error {
no-root-cap,
db-error(string),
}
}

View File

@ -7,7 +7,7 @@ edition = "2021"
simulation-mode = []
[dependencies]
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
wit-bindgen = "0.36.0"

View File

@ -11,7 +11,7 @@ anyhow = "1.0"
alloy-primitives = "0.8.15"
alloy-sol-types = "0.8.15"
hex = "0.4.3"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
process_macros = "0.1"
rmp-serde = "1.1.2"
serde = { version = "1.0", features = ["derive"] }

View File

@ -1,17 +1,18 @@
use crate::kinode::process::kns_indexer::{
GetStateRequest, IndexerRequest, IndexerResponse, NamehashToNameRequest, NodeInfoRequest,
WitKnsUpdate, WitState,
IndexerRequest, IndexerResponse, NamehashToNameRequest, NodeInfoRequest, ResetError,
ResetResult, WitKnsUpdate,
};
use alloy_primitives::keccak256;
use alloy_sol_types::SolEvent;
use kinode_process_lib::{
await_message, call_init, eth, kimap, net, print_to_terminal, println, timer, Address, Message,
Request, Response,
await_message, call_init, eth, kimap,
kv::{self, Kv},
net, print_to_terminal, println, timer, Address, Capability, Message, Request, Response,
};
use serde::{Deserialize, Serialize};
use std::{
collections::{hash_map::HashMap, BTreeMap},
collections::BTreeMap,
net::{IpAddr, Ipv4Addr, Ipv6Addr},
str::FromStr,
};
wit_bindgen::generate!({
@ -36,57 +37,174 @@ const KIMAP_FIRST_BLOCK: u64 = kimap::KIMAP_FIRST_BLOCK; // optimism
#[cfg(feature = "simulation-mode")]
const KIMAP_FIRST_BLOCK: u64 = 1; // local
const CURRENT_VERSION: u32 = 1;
const MAX_PENDING_ATTEMPTS: u8 = 3;
const SUBSCRIPTION_TIMEOUT: u64 = 60;
const DELAY_MS: u64 = 1_000; // 1s
#[derive(Clone, Debug, Serialize, Deserialize)]
#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
struct State {
chain_id: u64,
// what contract this state pertains to
contract_address: eth::Address,
// namehash to human readable name
names: HashMap<String, String>,
// human readable name to most recent on-chain routing information as json
nodes: HashMap<String, net::KnsUpdate>,
// last block we have an update from
/// version of the state in kv
version: u32,
/// last block we have an update from
last_block: u64,
/// kv handle
/// includes keys and values for:
/// "meta:chain_id", "meta:version", "meta:last_block", "meta:contract_address",
/// "names:{namehash}" -> "{name}", "nodes:{name}" -> "{node_info}"
kv: Kv<String, Vec<u8>>,
}
impl From<State> for WitState {
fn from(s: State) -> Self {
let contract_address: [u8; 20] = s.contract_address.into();
WitState {
chain_id: s.chain_id.clone(),
contract_address: contract_address.to_vec(),
names: s
.names
.iter()
.map(|(k, v)| (k.clone(), v.clone()))
.collect::<Vec<_>>(),
nodes: s
.nodes
.iter()
.map(|(k, v)| (k.clone(), v.clone().into()))
.collect::<Vec<_>>(),
last_block: s.last_block.clone(),
impl State {
fn new(our: &Address) -> Self {
let kv: Kv<String, Vec<u8>> = match kv::open(our.package_id(), "kns_indexer", Some(10)) {
Ok(kv) => kv,
Err(e) => panic!("fatal: error opening kns_indexer key_value database: {e:?}"),
};
Self {
version: CURRENT_VERSION,
last_block: KIMAP_FIRST_BLOCK,
kv,
}
}
}
impl From<WitState> for State {
fn from(s: WitState) -> Self {
let contract_address: [u8; 20] = s
.contract_address
.try_into()
.expect("invalid contract addess: doesn't have 20 bytes");
State {
chain_id: s.chain_id.clone(),
contract_address: contract_address.into(),
names: HashMap::from_iter(s.names),
nodes: HashMap::from_iter(s.nodes.iter().map(|(k, v)| (k.clone(), v.clone().into()))),
last_block: s.last_block.clone(),
/// Loads the state from kv, and updates it with the current block number and version.
    /// After this function runs, the chain-ID and contract-address constants
    /// always match the values stored in the kv.
fn load(our: &Address) -> Self {
let mut state = Self::new(our);
let desired_contract_address = eth::Address::from_str(KIMAP_ADDRESS).unwrap();
let version = state.get_version();
let chain_id = state.get_chain_id();
let contract_address = state.get_contract_address();
let last_block = state.get_last_block();
if version != Some(CURRENT_VERSION)
|| chain_id != Some(CHAIN_ID)
|| contract_address != Some(desired_contract_address)
{
// if version/contract/chain_id are new, run migrations here.
state.set_version(CURRENT_VERSION);
state.set_chain_id(CHAIN_ID);
state.set_contract_address(desired_contract_address);
}
state.last_block = last_block.unwrap_or(state.last_block);
println!(
"\n 🐦‍⬛ KNS Indexer State\n\
\n\
Version {}\n\
Chain ID {}\n\
Last Block {}\n\
KIMAP {}\n\
\n",
state.version, state.last_block, CHAIN_ID, desired_contract_address,
);
state
}
/// Reset by removing the database and reloading fresh state
fn reset(&self, our: &Address) {
// Remove the entire database
if let Err(e) = kv::remove_db(our.package_id(), "kns_indexer", None) {
println!("Warning: error removing kns_indexer database: {e:?}");
}
}
fn meta_version_key() -> String {
"meta:version".to_string()
}
fn meta_last_block_key() -> String {
"meta:last_block".to_string()
}
fn meta_chain_id_key() -> String {
"meta:chain_id".to_string()
}
fn meta_contract_address_key() -> String {
"meta:contract_address".to_string()
}
fn name_key(namehash: &str) -> String {
format!("name:{}", namehash)
}
fn node_key(name: &str) -> String {
format!("node:{}", name)
}
fn get_last_block(&self) -> Option<u64> {
self.kv.get_as::<u64>(&Self::meta_last_block_key()).ok()
}
fn set_last_block(&mut self, block: u64) {
self.kv
.set_as::<u64>(&Self::meta_last_block_key(), &block, None)
.unwrap();
self.last_block = block;
}
fn get_version(&self) -> Option<u32> {
self.kv.get_as::<u32>(&Self::meta_version_key()).ok()
}
fn set_version(&mut self, version: u32) {
self.kv
.set_as::<u32>(&Self::meta_version_key(), &version, None)
.unwrap();
self.version = version;
}
fn get_name(&self, namehash: &str) -> Option<String> {
self.kv
.get(&Self::name_key(namehash))
.ok()
.and_then(|bytes| String::from_utf8(bytes).ok())
}
fn set_name(&mut self, namehash: &str, name: &str) {
self.kv
.set(&Self::name_key(namehash), &name.as_bytes().to_vec(), None)
.unwrap();
}
fn get_node(&self, name: &str) -> Option<net::KnsUpdate> {
self.kv.get_as::<net::KnsUpdate>(&Self::node_key(name)).ok()
}
fn set_node(&mut self, name: &str, node: &net::KnsUpdate) {
self.kv
.set_as::<net::KnsUpdate>(&Self::node_key(name), &node, None)
.unwrap();
}
fn get_chain_id(&self) -> Option<u64> {
self.kv.get_as::<u64>(&Self::meta_chain_id_key()).ok()
}
fn set_chain_id(&mut self, chain_id: u64) {
self.kv
.set_as::<u64>(&Self::meta_chain_id_key(), &chain_id, None)
.unwrap();
}
fn get_contract_address(&self) -> Option<eth::Address> {
self.kv
.get_as::<eth::Address>(&Self::meta_contract_address_key())
.ok()
}
fn set_contract_address(&mut self, contract_address: eth::Address) {
self.kv
.set_as::<eth::Address>(&Self::meta_contract_address_key(), &contract_address, None)
.expect("Failed to set contract address");
}
}
@ -126,20 +244,8 @@ enum KnsError {
call_init!(init);
fn init(our: Address) {
println!("indexing on contract address {KIMAP_ADDRESS}");
// we **can** persist PKI state between boots but with current size, it's
// more robust just to reload the whole thing. the new contracts will allow
// us to quickly verify we have the updated mapping with root hash, but right
// now it's tricky to recover from missed events.
let state = State {
chain_id: CHAIN_ID,
contract_address: KIMAP_ADDRESS.parse::<eth::Address>().unwrap(),
nodes: HashMap::new(),
names: HashMap::new(),
last_block: KIMAP_FIRST_BLOCK,
};
// state is loaded from kv, and updated with the current block number and version.
let state = State::load(&our);
if let Err(e) = main(our, state) {
println!("fatal error: {e}");
@ -150,42 +256,37 @@ fn main(our: Address, mut state: State) -> anyhow::Result<()> {
#[cfg(feature = "simulation-mode")]
add_temp_hardcoded_tlzs(&mut state);
let chain_id = CHAIN_ID;
let kimap_address = eth::Address::from_str(KIMAP_ADDRESS).unwrap();
// sub_id: 1
// listen to all mint events in kimap
let mints_filter = eth::Filter::new()
.address(state.contract_address)
.address(kimap_address)
.from_block(state.last_block)
.to_block(eth::BlockNumberOrTag::Latest)
.event("Mint(bytes32,bytes32,bytes,bytes)");
let notes = vec![
// sub_id: 2
// listen to all note events that are relevant to the KNS protocol within kimap
let notes_filter = eth::Filter::new()
.address(kimap_address)
.from_block(state.last_block)
.to_block(eth::BlockNumberOrTag::Latest)
.event("Note(bytes32,bytes32,bytes,bytes,bytes)")
.topic3(vec![
keccak256("~ws-port"),
keccak256("~tcp-port"),
keccak256("~net-key"),
keccak256("~routers"),
keccak256("~ip"),
];
// sub_id: 2
let notes_filter = eth::Filter::new()
.address(state.contract_address)
.to_block(eth::BlockNumberOrTag::Latest)
.event("Note(bytes32,bytes32,bytes,bytes,bytes)")
.topic3(notes);
]);
// 60s timeout -- these calls can take a long time
// if they do time out, we try them again
let eth_provider: eth::Provider = eth::Provider::new(state.chain_id, SUBSCRIPTION_TIMEOUT);
print_to_terminal(
1,
&format!(
"subscribing, state.block: {}, chain_id: {}",
state.last_block - 1,
state.chain_id
),
);
let eth_provider: eth::Provider = eth::Provider::new(chain_id, SUBSCRIPTION_TIMEOUT);
// subscribe to logs first, so no logs are missed
println!("subscribing to new logs...");
eth_provider.subscribe_loop(1, mints_filter.clone());
eth_provider.subscribe_loop(2, notes_filter.clone());
@ -198,7 +299,7 @@ fn main(our: Address, mut state: State) -> anyhow::Result<()> {
let mut pending_notes: BTreeMap<u64, Vec<(kimap::contract::Note, u8)>> = BTreeMap::new();
// if block in state is < current_block, get logs from that part.
println!("syncing old logs...");
println!("syncing old logs from block: {}", state.last_block);
fetch_and_process_logs(
&eth_provider,
&mut state,
@ -211,6 +312,7 @@ fn main(our: Address, mut state: State) -> anyhow::Result<()> {
notes_filter.clone(),
&mut pending_notes,
);
// set a timer tick so any pending logs will be processed
timer::set_timer(DELAY_MS, None);
println!("done syncing old logs.");
@ -219,9 +321,16 @@ fn main(our: Address, mut state: State) -> anyhow::Result<()> {
let Ok(message) = await_message() else {
continue;
};
// if true, time to go check current block number and handle pending notes.
let tick = message.is_local(&our) && message.source().process == "timer:distro:sys";
let Message::Request { source, body, .. } = message else {
let Message::Request {
source,
body,
capabilities,
..
} = message
else {
if tick {
handle_eth_message(
&mut state,
@ -236,7 +345,7 @@ fn main(our: Address, mut state: State) -> anyhow::Result<()> {
continue;
};
if source.process == "eth:distro:sys" {
if source.node() == our.node() && source.process == "eth:distro:sys" {
handle_eth_message(
&mut state,
&eth_provider,
@ -247,28 +356,45 @@ fn main(our: Address, mut state: State) -> anyhow::Result<()> {
&notes_filter,
)?;
} else {
let request = serde_json::from_slice(&body)?;
match request {
match serde_json::from_slice(&body)? {
IndexerRequest::NamehashToName(NamehashToNameRequest { ref hash, .. }) => {
// TODO: make sure we've seen the whole block, while actually
// sending a response to the proper place.
Response::new()
.body(IndexerResponse::Name(state.names.get(hash).cloned()))
.body(IndexerResponse::Name(state.get_name(hash)))
.send()?;
}
IndexerRequest::NodeInfo(NodeInfoRequest { ref name, .. }) => {
Response::new()
.body(IndexerResponse::NodeInfo(
state.nodes.get(name).map(|n| n.clone().into()),
.body(&IndexerResponse::NodeInfo(
state
.get_node(name)
.map(|update| WitKnsUpdate::from(update)),
))
.send()?;
}
IndexerRequest::GetState(GetStateRequest { .. }) => {
IndexerRequest::Reset => {
// check for root capability
let root_cap = Capability {
issuer: our.clone(),
params: "{\"root\":true}".to_string(),
};
if source.package_id() != our.package_id() {
if !capabilities.contains(&root_cap) {
Response::new()
.body(IndexerResponse::GetState(state.clone().into()))
.body(IndexerResponse::Reset(ResetResult::Err(
ResetError::NoRootCap,
)))
.send()?;
continue;
}
}
// reload state fresh - this will create new db
state.reset(&our);
Response::new()
.body(IndexerResponse::Reset(ResetResult::Success))
.send()?;
panic!("resetting state, restarting!");
}
}
}
@ -304,11 +430,12 @@ fn handle_eth_message(
}
_ => {}
}
if tick {
let block_number = eth_provider.get_block_number();
if let Ok(block_number) = block_number {
print_to_terminal(2, &format!("new block: {}", block_number));
state.last_block = block_number;
state.set_last_block(block_number);
}
}
handle_pending_notes(state, pending_notes)?;
@ -346,15 +473,9 @@ fn handle_pending_notes(
None => {
print_to_terminal(1, &format!("pending note handling error: {e:?}"))
}
Some(ee) => match ee {
KnsError::NoParentError => {
// print_to_terminal(
// 1,
// &format!("note still awaiting mint; attempt {attempt}"),
// );
Some(KnsError::NoParentError) => {
keep_notes.push((note, attempt + 1));
}
},
}
}
}
@ -381,69 +502,53 @@ fn handle_note(state: &mut State, note: &kimap::contract::Note) -> anyhow::Resul
if !kimap::valid_note(&note_label) {
return Err(anyhow::anyhow!("skipping invalid note: {note_label}"));
}
let Some(node_name) = get_parent_name(&state.names, &node_hash) else {
let Some(node_name) = state.get_name(&node_hash) else {
return Err(KnsError::NoParentError.into());
};
if let Some(mut node) = state.get_node(&node_name) {
match note_label.as_str() {
"~ws-port" => {
let ws = bytes_to_port(&note.data)?;
if let Some(node) = state.nodes.get_mut(&node_name) {
node.ports.insert("ws".to_string(), ws);
// port defined, -> direct
node.routers = vec![];
}
node.routers = vec![]; // port defined, -> direct
}
"~tcp-port" => {
let tcp = bytes_to_port(&note.data)?;
if let Some(node) = state.nodes.get_mut(&node_name) {
node.ports.insert("tcp".to_string(), tcp);
// port defined, -> direct
node.routers = vec![];
}
node.routers = vec![]; // port defined, -> direct
}
"~net-key" => {
if note.data.len() != 32 {
return Err(anyhow::anyhow!("invalid net-key length"));
}
if let Some(node) = state.nodes.get_mut(&node_name) {
node.public_key = hex::encode(&note.data);
}
}
"~routers" => {
let routers = decode_routers(&note.data, state);
if let Some(node) = state.nodes.get_mut(&node_name) {
node.routers = routers;
// -> indirect
node.ports = BTreeMap::new();
node.ports = BTreeMap::new(); // -> indirect
node.ips = vec![];
}
}
"~ip" => {
let ip = bytes_to_ip(&note.data)?;
if let Some(node) = state.nodes.get_mut(&node_name) {
node.ips = vec![ip.to_string()];
// -> direct
node.routers = vec![];
}
node.routers = vec![]; // -> direct
}
_other => {
// Ignore unknown notes
}
}
// only send an update if we have a *full* set of data for networking:
// a node name, plus either <routers> or <ip, port(s)>
if let Some(node_info) = state.nodes.get(&node_name) {
if !node_info.public_key.is_empty()
&& ((!node_info.ips.is_empty() && !node_info.ports.is_empty())
|| node_info.routers.len() > 0)
// Update the node in the state
state.set_node(&node_name, &node);
// Only send an update if we have a *full* set of data for networking
if !node.public_key.is_empty()
&& ((!node.ips.is_empty() && !node.ports.is_empty()) || !node.routers.is_empty())
{
Request::to(("our", "net", "distro", "sys"))
.body(rmp_serde::to_vec(&net::NetAction::KnsUpdate(
node_info.clone(),
))?)
.body(rmp_serde::to_vec(&net::NetAction::KnsUpdate(node))?)
.send()?;
}
}
@ -457,7 +562,7 @@ fn handle_log(
log: &eth::Log,
) -> anyhow::Result<()> {
if let Some(block) = log.block_number {
state.last_block = block;
state.set_last_block(block);
}
match log.topics()[0] {
@ -471,15 +576,15 @@ fn handle_log(
return Err(anyhow::anyhow!("skipping invalid name: {name}"));
}
let full_name = match get_parent_name(&state.names, &parent_hash) {
let full_name = match state.get_name(&parent_hash) {
Some(parent_name) => format!("{name}.{parent_name}"),
None => name,
};
state.names.insert(child_hash.clone(), full_name.clone());
state.nodes.insert(
full_name.clone(),
net::KnsUpdate {
state.set_name(&child_hash.clone(), &full_name.clone());
state.set_node(
&full_name.clone(),
&net::KnsUpdate {
name: full_name.clone(),
public_key: String::new(),
ips: Vec::new(),
@ -519,18 +624,17 @@ fn handle_log(
Ok(())
}
// helpers
/// Get logs for a filter then process them while taking pending notes into account.
fn fetch_and_process_logs(
eth_provider: &eth::Provider,
state: &mut State,
filter: eth::Filter,
pending_notes: &mut BTreeMap<u64, Vec<(kimap::contract::Note, u8)>>,
) {
let filter = filter.from_block(KIMAP_FIRST_BLOCK);
loop {
match eth_provider.get_logs(&filter) {
Ok(logs) => {
println!("log len: {}", logs.len());
for log in logs {
if let Err(e) = handle_log(state, pending_notes, &log) {
print_to_terminal(1, &format!("log-handling error! {e:?}"));
@ -546,52 +650,23 @@ fn fetch_and_process_logs(
}
}
/// Walk the `names` map upward starting at `parent_hash`, collecting each
/// ancestor's label, then join the labels root-first with `.` into a full
/// dotted name. Returns `None` when no label at all was found for the hash.
/// A visited-set guards against cycles in the map.
fn get_parent_name(names: &HashMap<String, String>, parent_hash: &str) -> Option<String> {
    let mut seen = std::collections::HashSet::new();
    let mut labels: Vec<String> = Vec::new();
    let mut hash = parent_hash;
    loop {
        let Some(label) = names.get(hash) else { break };
        // stop if we have already been at this hash (cycle in the map)
        if !seen.insert(hash) {
            break;
        }
        if !label.is_empty() {
            labels.push(label.clone());
        }
        // NOTE(review): this lookup keys the map by *name*, so the map
        // presumably also holds name -> parent-hash links — confirm upstream.
        match names.get(label) {
            Some(next_hash) => hash = next_hash,
            None => break,
        }
    }
    if labels.is_empty() {
        None
    } else {
        labels.reverse();
        Some(labels.join("."))
    }
}
// TEMP. Either remove when event re-emitting is working with anvil,
// or refactor into a better structure(!)
#[cfg(feature = "simulation-mode")]
fn add_temp_hardcoded_tlzs(state: &mut State) {
// add some hardcoded top level zones
state.names.insert(
"0xdeeac81ae11b64e7cab86d089c306e5d223552a630f02633ce170d2786ff1bbd".to_string(),
"os".to_string(),
state.set_name(
&"0xdeeac81ae11b64e7cab86d089c306e5d223552a630f02633ce170d2786ff1bbd".to_string(),
&"os".to_string(),
);
state.names.insert(
"0x137d9e4cc0479164d40577620cb3b41b083c6e8dbf58f8523be76d207d6fd8ea".to_string(),
"dev".to_string(),
state.set_name(
&"0x137d9e4cc0479164d40577620cb3b41b083c6e8dbf58f8523be76d207d6fd8ea".to_string(),
&"dev".to_string(),
);
}
/// Decodes bytes into an array of keccak256 hashes (32 bytes each) and returns their full names.
/// Decodes bytes under ~routers in kimap into an array of keccak256 hashes (32 bytes each)
/// and returns the associated node identities.
fn decode_routers(data: &[u8], state: &State) -> Vec<String> {
if data.len() % 32 != 0 {
print_to_terminal(
@ -605,7 +680,7 @@ fn decode_routers(data: &[u8], state: &State) -> Vec<String> {
for chunk in data.chunks(32) {
let hash_str = format!("0x{}", hex::encode(chunk));
match state.names.get(&hash_str) {
match state.get_name(&hash_str) {
Some(full_name) => routers.push(full_name.clone()),
None => print_to_terminal(
1,
@ -617,6 +692,7 @@ fn decode_routers(data: &[u8], state: &State) -> Vec<String> {
routers
}
/// convert IP address stored at ~ip in kimap to IpAddr
pub fn bytes_to_ip(bytes: &[u8]) -> anyhow::Result<IpAddr> {
match bytes.len() {
4 => {
@ -633,6 +709,7 @@ pub fn bytes_to_ip(bytes: &[u8]) -> anyhow::Result<IpAddr> {
}
}
/// convert port stored at ~[protocol]-port in kimap to u16
pub fn bytes_to_port(bytes: &[u8]) -> anyhow::Result<u16> {
match bytes.len() {
2 => Ok(u16::from_be_bytes([bytes[0], bytes[1]])),

View File

@ -0,0 +1,20 @@
[package]
name = "node_info"
version = "0.1.0"
edition = "2021"
[features]
simulation-mode = []
[dependencies]
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
process_macros = "0.1"
wit-bindgen = "0.36.0"
[lib]
crate-type = ["cdylib"]
[package.metadata.component]
package = "kinode:process"

View File

@ -0,0 +1,35 @@
use kinode::process::kns_indexer::{IndexerRequest, IndexerResponse, NodeInfoRequest};
use kinode_process_lib::{println, script, Address, Request};
use std::str::FromStr;
wit_bindgen::generate!({
path: "target/wit",
world: "kns-indexer-sys-v0",
generate_unused_types: true,
additional_derives: [serde::Deserialize, serde::Serialize, process_macros::SerdeJsonInto],
});
script!(init);
/// Script entry point: queries the local kns-indexer for the node named by
/// the first whitespace-separated token of `args` and returns a printable
/// summary of its info (or a not-found message).
fn init(_our: Address, args: String) -> String {
    // first token of the arg string is the node name; empty string if none given
    let name = match args.split_whitespace().next() {
        Some(token) => token.to_string(),
        None => String::new(),
    };
    let indexer = Address::from_str("our@kns-indexer:kns-indexer:sys").unwrap();
    // ask the indexer; 5-second request timeout
    let request = IndexerRequest::NodeInfo(NodeInfoRequest { name, block: 0 });
    let response = Request::to(indexer)
        .body(request)
        .send_and_await_response(5)
        .unwrap()
        .unwrap();
    let parsed = serde_json::from_slice::<IndexerResponse>(&response.body()).unwrap();
    if let IndexerResponse::NodeInfo(node_info) = parsed {
        format!("node info: {node_info:#?}")
    } else {
        "node info: name not found".to_string()
    }
}

View File

@ -8,12 +8,14 @@
"eth:distro:sys",
"http-server:distro:sys",
"net:distro:sys",
"timer:distro:sys"
"timer:distro:sys",
"kv:distro:sys"
],
"grant_capabilities": [
"eth:distro:sys",
"http-server:distro:sys",
"timer:distro:sys"
"timer:distro:sys",
"kv:distro:sys"
],
"public": false
}

View File

@ -11,7 +11,25 @@
],
"wit_version": 1
},
"state.wasm": {
"reset.wasm": {
"root": false,
"public": false,
"request_networking": false,
"request_capabilities": [
"kns-indexer:kns-indexer:sys",
{
"process": "kns-indexer:kns-indexer:sys",
"params": {
"root": true
}
}
],
"grant_capabilities": [
"kns-indexer:kns-indexer:sys"
],
"wit_version": 1
},
"node-info.wasm": {
"root": false,
"public": false,
"request_networking": false,

View File

@ -1,5 +1,5 @@
[package]
name = "state"
name = "reset"
version = "0.1.0"
edition = "2021"
@ -7,10 +7,10 @@ edition = "2021"
simulation-mode = []
[dependencies]
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
process_macros = "0.1"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
process_macros = "0.1"
wit-bindgen = "0.36.0"
[lib]

View File

@ -0,0 +1,23 @@
use std::str::FromStr;
use kinode::process::kns_indexer::IndexerRequest;
use kinode_process_lib::{call_init, Address, Request};
wit_bindgen::generate!({
path: "target/wit",
world: "kns-indexer-sys-v0",
generate_unused_types: true,
additional_derives: [serde::Deserialize, serde::Serialize, process_macros::SerdeJsonInto],
});
call_init!(init);
/// Process entry point: asks the local kns-indexer to reset its state,
/// waiting up to 5 seconds for the acknowledgement.
fn init(_our: Address) {
    let indexer = Address::from_str("our@kns-indexer:kns-indexer:sys").unwrap();
    // send the reset and await the indexer's response (5 s timeout)
    Request::to(indexer)
        .body(IndexerRequest::Reset)
        .send_and_await_response(5)
        .unwrap()
        .unwrap();
}

View File

@ -1,46 +0,0 @@
use crate::kinode::process::kns_indexer::{GetStateRequest, IndexerRequest, IndexerResponse};
use kinode_process_lib::{eth, script, Address, Message, Request};
wit_bindgen::generate!({
path: "target/wit",
world: "kns-indexer-sys-v0",
generate_unused_types: true,
additional_derives: [serde::Deserialize, serde::Serialize, process_macros::SerdeJsonInto],
});
script!(init);
/// Script entry point: fetches the full indexer state from kns-indexer and
/// renders a human-readable summary — chain id, contract address, node count
/// as of the last indexed block, and every known node name. Takes no args.
fn init(_our: Address, _args: String) -> String {
    // request the indexer's complete state; 10-second timeout
    let Ok(Message::Response { body, .. }) =
        Request::to(("our", "kns-indexer", "kns-indexer", "sys"))
            .body(IndexerRequest::GetState(GetStateRequest { block: 0 }))
            .send_and_await_response(10)
            .unwrap()
    else {
        return "failed to get state from kns-indexer".to_string();
    };
    let Ok(IndexerResponse::GetState(state)) = body.try_into() else {
        return "failed to deserialize state".to_string();
    };
    // can change later, but for now, just print every known node name, sorted
    let mut names: Vec<String> = state.names.values().cloned().collect();
    names.sort();
    // the contract address is stored as raw bytes and must be exactly 20 long
    let contract_address: [u8; 20] = state
        .contract_address
        .try_into()
        .expect("invalid contract address: doesn't have 20 bytes");
    let contract_address: eth::Address = contract_address.into();
    format!(
        "\nrunning on chain id {}\nCA: {}\n{} known nodes as of block {}\n {}",
        state.chain_id,
        contract_address,
        names.len(),
        state.last_block,
        names.join("\n ")
    )
}

View File

@ -1751,7 +1751,7 @@ dependencies = [
[[package]]
name = "kinode_process_lib"
version = "0.10.0"
source = "git+https://github.com/kinode-dao/process_lib?rev=0443ece#0443ece2a5dfdbdc1b40db454a1535e1b1c1a1b3"
source = "git+https://github.com/kinode-dao/process_lib?rev=d97e012#d97e012842dd4cc0e036d5de5048064e770302ab"
dependencies = [
"alloy",
"alloy-primitives",

View File

@ -19,6 +19,13 @@
},
"http-server:distro:sys",
"kernel:distro:sys",
"kns-indexer:kns-indexer:sys",
{
"process": "kns-indexer:kns-indexer:sys",
"params": {
"root": true
}
},
"net:distro:sys",
"vfs:distro:sys",
{

View File

@ -165,7 +165,7 @@
margin-left: 6px;
}
</style>
<script type="module" crossorigin src="/settings:settings:sys/assets/index-BVR8Atdy.js"></script>
<script type="module" crossorigin src="/settings:settings:sys/assets/index-CwCaX2Ut.js"></script>
<link rel="stylesheet" crossorigin href="/settings:settings:sys/assets/index-iGirBDd0.css">
</head>

View File

@ -10,7 +10,7 @@ simulation-mode = []
anyhow = "1.0"
base64 = "0.22.0"
bincode = "1.3.3"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
rmp-serde = "1.2.0"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"

View File

@ -1,10 +1,10 @@
use kinode_process_lib::{
await_message, call_init, eth, get_blob, homepage, http, kernel_types, kimap, net, println,
Address, Capability, LazyLoadBlob, Message, NodeId, ProcessId, Request, Response, SendError,
SendErrorKind,
await_message, call_init, eth, get_blob, get_capability, homepage, http, kernel_types, kimap,
net, println, Address, Capability, LazyLoadBlob, Message, NodeId, ProcessId, Request, Response,
SendError, SendErrorKind,
};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::{collections::HashMap, vec};
const ICON: &str = include_str!("icon");
@ -18,6 +18,7 @@ enum SettingsRequest {
PeerId(NodeId),
EthConfig(eth::EthConfigAction),
Shutdown,
Reset,
KillProcess(ProcessId),
SetStylesheet(String),
}
@ -464,6 +465,19 @@ fn handle_settings_request(
.send()
.unwrap();
}
SettingsRequest::Reset => {
// reset KNS
let kns_address = Address::new(&state.our.node, ("kns-indexer", "kns-indexer", "sys"));
let root_cap = get_capability(&kns_address, "{\"root\":true}");
if let Some(cap) = root_cap {
Request::to(("our", "kns-indexer", "kns-indexer", "sys"))
.body(serde_json::to_vec(&SettingsRequest::Reset).unwrap())
.capabilities(vec![cap])
.send()
.unwrap();
}
}
SettingsRequest::KillProcess(pid) => {
// kill a process
if let Err(_) = Request::to(("our", "kernel", "distro", "sys"))

View File

@ -80,6 +80,11 @@ function App() {
setTimeout(() => window.location.reload(), 1000);
};
// Ask the backend to reset KNS state, then refresh the page a second
// later so the UI reflects the freshly rebuilt state.
const handleReset = () => {
  apiCall("Reset");
  setTimeout(function () {
    window.location.reload();
  }, 1000);
};
const handleSaveStylesheet = () => {
const stylesheet = (document.getElementById('stylesheet-editor') as HTMLTextAreaElement).value;
apiCall({ "SetStylesheet": stylesheet });
@ -141,7 +146,20 @@ function App() {
<p id="net-key">{appState.identity?.networking_key}</p>
{appState.identity?.ws_routing && <p id="ip-ports">{appState.identity.ws_routing}</p>}
{appState.identity?.routers && <p id="routers">{appState.identity.routers}</p>}
<button id="shutdown" onClick={handleShutdown}>shut down node(!)</button>
<div className="mt-16 flex flex-col justify-start">
<button
onClick={handleShutdown}
className="bg-red-500 hover:bg-red-600 text-white font-bold py-2 px-4 rounded w-full mb-8"
>
Shutdown Node
</button>
<button
onClick={handleReset}
className="bg-yellow-500 hover:bg-yellow-600 text-white font-bold py-2 px-4 rounded w-full"
>
Reset KNS State
</button>
</div>
</article>
<article id="pings">

View File

@ -1919,7 +1919,7 @@ dependencies = [
[[package]]
name = "kinode_process_lib"
version = "0.10.0"
source = "git+https://github.com/kinode-dao/process_lib?rev=0443ece#0443ece2a5dfdbdc1b40db454a1535e1b1c1a1b3"
source = "git+https://github.com/kinode-dao/process_lib?rev=d97e012#d97e012842dd4cc0e036d5de5048064e770302ab"
dependencies = [
"alloy",
"alloy-primitives",

View File

@ -8,7 +8,7 @@ simulation-mode = []
[dependencies]
anyhow = "1.0"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
wit-bindgen = "0.36.0"

View File

@ -8,7 +8,7 @@ simulation-mode = []
[dependencies]
anyhow = "1.0"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
wit-bindgen = "0.36.0"

View File

@ -7,7 +7,7 @@ edition = "2021"
simulation-mode = []
[dependencies]
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
wit-bindgen = "0.36.0"
[lib]

View File

@ -7,7 +7,7 @@ edition = "2021"
simulation-mode = []
[dependencies]
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
wit-bindgen = "0.36.0"
[lib]

View File

@ -7,7 +7,7 @@ edition = "2021"
simulation-mode = []
[dependencies]
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
wit-bindgen = "0.36.0"

View File

@ -8,7 +8,7 @@ simulation-mode = []
[dependencies]
anyhow = "1.0"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
rmp-serde = "1.1.2"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"

View File

@ -8,7 +8,7 @@ simulation-mode = []
[dependencies]
anyhow = "1.0"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
wit-bindgen = "0.36.0"

View File

@ -9,7 +9,7 @@ simulation-mode = []
[dependencies]
anyhow = "1.0"
clap = "4.4"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
regex = "1.10.3"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"

View File

@ -7,7 +7,7 @@ edition = "2021"
simulation-mode = []
[dependencies]
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
rmp-serde = "1.1.2"
serde = { version = "1.0", features = ["derive"] }
wit-bindgen = "0.36.0"

View File

@ -7,7 +7,7 @@ edition = "2021"
simulation-mode = []
[dependencies]
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
rmp-serde = "1.1.2"
serde = { version = "1.0", features = ["derive"] }
wit-bindgen = "0.36.0"

View File

@ -7,7 +7,7 @@ edition = "2021"
simulation-mode = []
[dependencies]
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
rmp-serde = "1.1.2"
serde = { version = "1.0", features = ["derive"] }
wit-bindgen = "0.36.0"

View File

@ -20,6 +20,12 @@
"http-client:distro:sys",
"kernel:distro:sys",
"kns-indexer:kns-indexer:sys",
{
"process": "kns-indexer:kns-indexer:sys",
"params": {
"root": true
}
},
"kv:distro:sys",
"net:distro:sys",
"sqlite:distro:sys",

View File

@ -9,7 +9,7 @@ simulation-mode = []
[dependencies]
anyhow = "1.0"
bincode = "1.3.3"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
rand = "0.8"
regex = "1.10.3"
serde = { version = "1.0", features = ["derive"] }

View File

@ -9,7 +9,7 @@ simulation-mode = []
[dependencies]
anyhow = "1.0"
clap = "4.4"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
wit-bindgen = "0.36.0"

View File

@ -1751,7 +1751,7 @@ dependencies = [
[[package]]
name = "kinode_process_lib"
version = "0.10.0"
source = "git+https://github.com/kinode-dao/process_lib?rev=0443ece#0443ece2a5dfdbdc1b40db454a1535e1b1c1a1b3"
source = "git+https://github.com/kinode-dao/process_lib?rev=d97e012#d97e012842dd4cc0e036d5de5048064e770302ab"
dependencies = [
"alloy",
"alloy-primitives",

View File

@ -9,7 +9,7 @@ simulation-mode = []
[dependencies]
anyhow = "1.0"
bincode = "1.3.3"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
process_macros = "0.1"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"

View File

@ -156,6 +156,9 @@
<script src="https://cdnjs.cloudflare.com/ajax/libs/argon2-browser/1.18.0/argon2-bundled.min.js"
integrity="sha512-Alrh8vbmKDc5xiq7I/y8LTDwy9nw1nT9S/yR73HMMoWrpX4S1kizNPdWM896c/CDIGILNwAiaih627A94kRhYQ=="
crossorigin="anonymous" referrerpolicy="no-referrer"></script>
<!-- REMOVE IN 1.0.0 -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/crypto-js/4.2.0/crypto-js.min.js"></script>
<!--------------------->
<script>
let isInitialized = false;
@ -207,6 +210,24 @@
if (result.status == 200) {
window.location.reload();
} else {
// REMOVE IN 1.0.0
const hashHex = '0x' + CryptoJS.SHA256(password).toString(CryptoJS.enc.Hex);
const result = await fetch("/login", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
password_hash: hashHex,
subdomain: isSecureSubdomain ? firstPathItem : '',
}),
});
if (result.status == 200) {
window.location.reload();
} else {
throw new Error("Login failed");
}
// END REMOVE IN 1.0.0
throw new Error("Login failed");
}
}).catch(err => {

View File

@ -874,8 +874,20 @@ async fn login_with_password(
let password_hash_hex = format!("0x{}", password_hash);
let k = keygen::decode_keyfile(&disk_keyfile, &password_hash_hex)
.expect("could not decode keyfile, password incorrect");
// SWITCH BACK TO THIS IN 1.0.0
// let k = keygen::decode_keyfile(&disk_keyfile, &password_hash_hex)
// .expect("could not decode keyfile, password incorrect");
// REMOVE IN 1.0.0
let k = match keygen::decode_keyfile(&disk_keyfile, &password_hash_hex) {
Ok(k) => k,
Err(_) => {
use sha2::{Digest, Sha256};
let password_hash = format!("0x{}", hex::encode(Sha256::digest(password)));
keygen::decode_keyfile(&disk_keyfile, &password_hash)
.expect("could not decode keyfile, password incorrect")
}
};
let mut our = Identity {
name: k.username.clone(),

View File

@ -7,6 +7,8 @@ import {
import { PageProps } from "../lib/types";
import Loader from "../components/Loader";
import { redirectToHomepage } from "../utils/redirect-to-homepage";
// REMOVE IN 1.0.0
import { sha256, toBytes } from "viem";
interface ImportKeyfileProps extends PageProps { }
@ -69,7 +71,28 @@ function ImportKeyfile({
});
if (result.status > 399) {
throw new Error(await result.text());
// REMOVE IN 1.0.0
let hashed_password = sha256(toBytes(pw));
const result = await fetch("/import-keyfile", {
method: "POST",
credentials: 'include',
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
keyfile: Buffer.from(localKey).toString('utf8'),
password_hash: hashed_password,
}),
});
if (result.status > 399) {
throw new Error("Incorrect password");
} else {
redirectToHomepage();
}
// END REMOVE IN 1.0.0
// BRING BACK IN 1.0.0
// throw new Error(await result.text());
}
redirectToHomepage();
}).catch(err => {

View File

@ -4,6 +4,8 @@ import Loader from "../components/Loader";
import { useNavigate } from "react-router-dom";
import { Tooltip } from "../components/Tooltip";
import { redirectToHomepage } from "../utils/redirect-to-homepage";
// REMOVE IN 1.0.0
import { sha256, toBytes } from "viem";
interface LoginProps extends PageProps { }
@ -53,8 +55,28 @@ function Login({
}
);
if (result.status > 399) {
// REMOVE IN 1.0.0
let hashed_password = sha256(toBytes(pw));
const result = await fetch(
"/login",
{
method: "POST",
credentials: 'include',
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ password_hash: hashed_password }),
}
);
if (result.status > 399) {
throw new Error(await result.text());
} else {
redirectToHomepage();
}
// END REMOVE IN 1.0.0
// BRING BACK IN 1.0.0
// throw new Error(await result.text());
}
redirectToHomepage();
}).catch(err => {

View File

@ -83,10 +83,10 @@ impl SqliteState {
fs::create_dir_all(&db_path).await?;
let db_file_path = format!("{}.db", db);
let db_file_path = db_path.join(format!("{}.db", db));
let db_conn = Connection::open(db_file_path)?;
let _ = db_conn.execute("PRAGMA journal_mode=WAL", []);
let _: String = db_conn.query_row("PRAGMA journal_mode=WAL", [], |row| row.get(0))?;
self.open_dbs.insert(key, Mutex::new(db_conn));

6
package-lock.json generated Normal file
View File

@ -0,0 +1,6 @@
{
"name": "kinode",
"lockfileVersion": 2,
"requires": true,
"packages": {}
}