Mirror of https://github.com/uqbar-dao/nectar.git (synced 2024-12-20 23:21:36 +03:00)

Merge branch 'v0.10.0' into bp/kviterators
Commit: a59d540ba1

Cargo.lock (generated): 39
@@ -1,6 +1,6 @@
 # This file is automatically @generated by Cargo.
 # It is not intended for manual editing.
-version = 3
+version = 4
 
 [[package]]
 name = "addr2line"
@@ -4191,7 +4191,7 @@ dependencies = [
 [[package]]
 name = "kinode_process_lib"
 version = "0.10.0"
-source = "git+https://github.com/kinode-dao/process_lib?rev=0443ece#0443ece2a5dfdbdc1b40db454a1535e1b1c1a1b3"
+source = "git+https://github.com/kinode-dao/process_lib?rev=d97e012#d97e012842dd4cc0e036d5de5048064e770302ab"
 dependencies = [
 "alloy 0.8.1",
 "alloy-primitives 0.8.15",
@@ -4701,6 +4701,17 @@ dependencies = [
 "libc",
 ]
 
+[[package]]
+name = "node_info"
+version = "0.1.0"
+dependencies = [
+"kinode_process_lib 0.10.0",
+"process_macros",
+"serde",
+"serde_json",
+"wit-bindgen 0.36.0",
+]
+
 [[package]]
 name = "nohash-hasher"
 version = "0.2.0"
@@ -5684,6 +5695,17 @@ dependencies = [
 "windows-registry",
 ]
 
+[[package]]
+name = "reset"
+version = "0.1.0"
+dependencies = [
+"kinode_process_lib 0.10.0",
+"process_macros",
+"serde",
+"serde_json",
+"wit-bindgen 0.36.0",
+]
+
 [[package]]
 name = "rfc6979"
 version = "0.4.0"
@@ -6294,7 +6316,7 @@ dependencies = [
 [[package]]
 name = "snow"
 version = "0.9.0"
-source = "git+https://github.com/dr-frmr/snow?branch=dr/extract_cipherstates#1d4eb5f6747aa59aabb32bbbe698fb4bb7dfb9a4"
+source = "git+https://github.com/dr-frmr/snow?branch=dr%2Fextract_cipherstates#1d4eb5f6747aa59aabb32bbbe698fb4bb7dfb9a4"
 dependencies = [
 "aes-gcm",
 "blake2",
@@ -6364,17 +6386,6 @@ version = "1.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
 
-[[package]]
-name = "state"
-version = "0.1.0"
-dependencies = [
-"kinode_process_lib 0.10.0",
-"process_macros",
-"serde",
-"serde_json",
-"wit-bindgen 0.36.0",
-]
-
 [[package]]
 name = "static_assertions"
 version = "1.1.0"
@@ -19,8 +19,8 @@ members = [
 "kinode/packages/chess/chess",
 "kinode/packages/contacts/contacts",
 "kinode/packages/homepage/homepage",
-"kinode/packages/kns-indexer/kns-indexer", "kinode/packages/kns-indexer/get-block", "kinode/packages/kns-indexer/state",
-"kinode/packages/settings/settings",
+"kinode/packages/kns-indexer/kns-indexer", "kinode/packages/kns-indexer/get-block", "kinode/packages/settings/settings", "kinode/packages/kns-indexer/reset",
+"kinode/packages/kns-indexer/node-info",
 "kinode/packages/terminal/terminal",
 "kinode/packages/terminal/alias", "kinode/packages/terminal/cat", "kinode/packages/terminal/echo",
 "kinode/packages/terminal/help", "kinode/packages/terminal/hi", "kinode/packages/terminal/kfetch",
kinode/packages/app-store/Cargo.lock (generated): 2

@@ -1890,7 +1890,7 @@ dependencies = [
 [[package]]
 name = "kinode_process_lib"
 version = "0.10.0"
-source = "git+https://github.com/kinode-dao/process_lib?rev=0443ece#0443ece2a5dfdbdc1b40db454a1535e1b1c1a1b3"
+source = "git+https://github.com/kinode-dao/process_lib?rev=d97e012#d97e012842dd4cc0e036d5de5048064e770302ab"
 dependencies = [
 "alloy",
 "alloy-primitives",
@@ -134,6 +134,10 @@ interface chain {
 ///
 /// lazy-load-blob: none.
 stop-auto-update(package-id),
+/// Reset app-store db
+///
+/// lazy-load-blob: none.
+reset,
 }
 
 /// Responses from the chain component
@@ -149,6 +153,8 @@ interface chain {
 /// lazy-load-blob: none.
 auto-update-stopped,
 /// lazy-load-blob: none.
+/// successful reset
+reset-ok,
 err(chain-error),
 }
 
@@ -297,6 +303,9 @@ interface downloads {
 blob-not-found,
 vfs-error,
 handling-error(string),
+timeout,
+invalid-manifest,
+offline,
 }
 
 /// Notification that a download is complete
@@ -306,12 +315,26 @@ interface downloads {
 err: option<download-error>,
 }
 
-/// Request for an auto-download complete
-record auto-download-complete-request {
-download-info: download-complete-request,
+/// Variant for an auto-download complete
+variant auto-download-complete-request {
+success(auto-download-success),
+err(auto-download-error),
+}
+
+/// Auto-download success
+record auto-download-success {
+package-id: package-id,
+version-hash: string,
 manifest-hash: string,
 }
 
+/// Auto-download error
+record auto-download-error {
+package-id: package-id,
+version-hash: string,
+tries: list<tuple<string, download-error>>, // (mirror, error)
+}
+
 /// Represents a hash mismatch error
 record hash-mismatch {
 desired: string,
@ -11,7 +11,7 @@ alloy-primitives = "0.8.15"
|
|||||||
alloy-sol-types = "0.8.15"
|
alloy-sol-types = "0.8.15"
|
||||||
anyhow = "1.0"
|
anyhow = "1.0"
|
||||||
bincode = "1.3.3"
|
bincode = "1.3.3"
|
||||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
|
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
|
||||||
process_macros = "0.1"
|
process_macros = "0.1"
|
||||||
rand = "0.8"
|
rand = "0.8"
|
||||||
serde = { version = "1.0", features = ["derive"] }
|
serde = { version = "1.0", features = ["derive"] }
|
||||||
|
@@ -3,11 +3,13 @@
 //! and sends back http_responses.
 //!
 use crate::{
-kinode::process::chain::{ChainRequests, ChainResponses},
-kinode::process::downloads::{
-DownloadRequests, DownloadResponses, Entry, LocalDownloadRequest, RemoveFileRequest,
+kinode::process::{
+chain::{ChainRequests, ChainResponses},
+downloads::{
+DownloadRequests, DownloadResponses, Entry, LocalDownloadRequest, RemoveFileRequest,
+},
 },
-state::{MirrorCheck, PackageState, State},
+state::{MirrorCheck, PackageState, State, Updates},
 };
 use kinode_process_lib::{
 http::{self, server, Method, StatusCode},
@@ -28,6 +30,7 @@ pub fn init_frontend(our: &Address, http_server: &mut server::HttpServer) {
 "/downloads", // all downloads
 "/installed", // all installed apps
 "/ourapps", // all apps we've published
+"/updates", // all auto_updates
 "/apps/:id", // detail about an on-chain app
 "/downloads/:id", // local downloads for an app
 "/installed/:id", // detail about an installed app
@@ -37,7 +40,9 @@ pub fn init_frontend(our: &Address, http_server: &mut server::HttpServer) {
 "/apps/:id/install", // install a downloaded app
 "/downloads/:id/mirror", // start mirroring a version of a downloaded app
 "/downloads/:id/remove", // remove a downloaded app
+"/reset", // reset chain state, re-index
 "/apps/:id/auto-update", // set auto-updating a version of a downloaded app
+"/updates/:id/clear", // clear update info for an app.
 "/mirrorcheck/:node", // check if a node/mirror is online/offline
 ] {
 http_server
@@ -207,9 +212,10 @@ fn make_widget() -> String {
 pub fn handle_http_request(
 our: &Address,
 state: &mut State,
+updates: &mut Updates,
 req: &server::IncomingHttpRequest,
 ) -> (server::HttpResponse, Option<LazyLoadBlob>) {
-match serve_paths(our, state, req) {
+match serve_paths(our, state, updates, req) {
 Ok((status_code, _headers, body)) => (
 server::HttpResponse::new(status_code).header("Content-Type", "application/json"),
 Some(LazyLoadBlob {
@@ -248,13 +254,13 @@ fn gen_package_info(id: &PackageId, state: &PackageState) -> serde_json::Value {
 "our_version_hash": state.our_version_hash,
 "verified": state.verified,
 "caps_approved": state.caps_approved,
-"pending_update_hash": state.pending_update_hash,
 })
 }
 
 fn serve_paths(
 our: &Address,
 state: &mut State,
+updates: &mut Updates,
 req: &server::IncomingHttpRequest,
 ) -> anyhow::Result<(http::StatusCode, Option<HashMap<String, String>>, Vec<u8>)> {
 let method = req.method()?;
@@ -533,7 +539,6 @@ fn serve_paths(
 .ok_or(anyhow::anyhow!("missing blob"))?
 .bytes;
 let body_json: serde_json::Value = serde_json::from_slice(&body).unwrap_or_default();
-
 let version_hash = body_json
 .get("version_hash")
 .and_then(|v| v.as_str())
@@ -697,6 +702,53 @@ fn serve_paths(
 )),
 }
 }
+// GET all failed/pending auto_updates
+"/updates" => {
+let serialized = serde_json::to_vec(&updates).unwrap_or_default();
+return Ok((StatusCode::OK, None, serialized));
+}
+// POST clear all failed/pending auto_updates for a package_id
+"/updates/:id/clear" => {
+let Ok(package_id) = get_package_id(url_params) else {
+return Ok((
+StatusCode::BAD_REQUEST,
+None,
+format!("Missing package_id").into_bytes(),
+));
+};
+if method != Method::POST {
+return Ok((
+StatusCode::METHOD_NOT_ALLOWED,
+None,
+format!("Invalid method {method} for {bound_path}").into_bytes(),
+));
+}
+let _ = updates.package_updates.remove(&package_id);
+updates.save();
+Ok((StatusCode::OK, None, vec![]))
+}
+// POST reset chain state, re-index
+"/reset" => {
+if method != Method::POST {
+return Ok((
+StatusCode::METHOD_NOT_ALLOWED,
+None,
+format!("Invalid method {method} for {bound_path}").into_bytes(),
+));
+}
+let chain = Address::from_str("our@chain:app-store:sys")?;
+
+let resp = Request::new()
+.target(chain)
+.body(&ChainRequests::Reset)
+.send_and_await_response(5)??;
+let msg = serde_json::from_slice::<ChainResponses>(resp.body())?;
+if let ChainResponses::ResetOk = msg {
+Ok((StatusCode::OK, None, vec![]))
+} else {
+Ok((StatusCode::INTERNAL_SERVER_ERROR, None, vec![]))
+}
+}
 // GET online/offline mirrors for a listed app
 "/mirrorcheck/:node" => {
 if method != Method::GET {
@@ -42,7 +42,7 @@ use kinode_process_lib::{
 LazyLoadBlob, Message, PackageId, Response,
 };
 use serde::{Deserialize, Serialize};
-use state::State;
+use state::{State, UpdateInfo, Updates};
 
 wit_bindgen::generate!({
 path: "target/wit",
@@ -78,20 +78,22 @@ pub enum Resp {
 
 call_init!(init);
 fn init(our: Address) {
-println!("started");
 
 let mut http_server = http::server::HttpServer::new(5);
 http_api::init_frontend(&our, &mut http_server);
 
+// state = state built from the filesystem, installed packages
+// updates = state saved with get/set_state(), auto_update metadata.
 let mut state = State::load().expect("state loading failed");
+let mut updates = Updates::load();
 loop {
 match await_message() {
 Err(send_error) => {
 print_to_terminal(1, &format!("main: got network error: {send_error}"));
 }
 Ok(message) => {
-if let Err(e) = handle_message(&our, &mut state, &mut http_server, &message) {
+if let Err(e) =
+handle_message(&our, &mut state, &mut updates, &mut http_server, &message)
+{
 let error_message = format!("error handling message: {e:?}");
 print_to_terminal(1, &error_message);
 Response::new()
@@ -111,6 +113,7 @@ fn init(our: Address) {
 fn handle_message(
 our: &Address,
 state: &mut State,
+updates: &mut Updates,
 http_server: &mut http::server::HttpServer,
 message: &Message,
 ) -> anyhow::Result<()> {
@@ -134,7 +137,7 @@ fn handle_message(
 }
 http_server.handle_request(
 server_request,
-|incoming| http_api::handle_http_request(our, state, &incoming),
+|incoming| http_api::handle_http_request(our, state, updates, &incoming),
 |_channel_id, _message_type, _blob| {
 // not expecting any websocket messages from FE currently
 },
@@ -168,40 +171,80 @@ fn handle_message(
 "auto download complete from non-local node"
 ));
 }
-// auto_install case:
-// the downloads process has given us the new package manifest's
-// capability hashes, and the old package's capability hashes.
-// we can use these to determine if the new package has the same
-// capabilities as the old one, and if so, auto-install it.
-
-let manifest_hash = req.manifest_hash;
-let package_id = req.download_info.package_id;
-let version_hash = req.download_info.version_hash;
+match req {
+AutoDownloadCompleteRequest::Success(succ) => {
+// auto_install case:
+// the downloads process has given us the new package manifest's
+// capability hashes, and the old package's capability hashes.
+// we can use these to determine if the new package has the same
+// capabilities as the old one, and if so, auto-install it.
+let manifest_hash = succ.manifest_hash;
+let package_id = succ.package_id;
+let version_hash = succ.version_hash;
 
 let process_lib_package_id = package_id.clone().to_process_lib();
 
 // first, check if we have the package and get its manifest hash
 let should_auto_install = state
 .packages
 .get(&process_lib_package_id)
 .map(|package| package.manifest_hash == Some(manifest_hash.clone()))
 .unwrap_or(false);
 
 if should_auto_install {
 if let Err(e) =
 utils::install(&package_id, None, &version_hash, state, &our.node)
 {
-if let Some(package) = state.packages.get_mut(&process_lib_package_id) {
-package.pending_update_hash = Some(version_hash);
+println!("error auto-installing package: {e}");
+// Get or create the outer map for this package
+updates
+.package_updates
+.entry(package_id.to_process_lib())
+.or_default()
+.insert(
+version_hash.clone(),
+UpdateInfo {
+errors: vec![],
+pending_manifest_hash: Some(manifest_hash.clone()),
+},
+);
+updates.save();
+} else {
+println!(
+"auto-installed update for package: {process_lib_package_id}"
+);
+}
+} else {
+// TODO.
+updates
+.package_updates
+.entry(package_id.to_process_lib())
+.or_default()
+.insert(
+version_hash.clone(),
+UpdateInfo {
+errors: vec![],
+pending_manifest_hash: Some(manifest_hash.clone()),
+},
+);
+updates.save();
 }
-println!("error auto-installing package: {e}");
-} else {
-println!("auto-installed update for package: {process_lib_package_id}");
 }
-} else {
-if let Some(package) = state.packages.get_mut(&process_lib_package_id) {
-package.pending_update_hash = Some(version_hash);
-println!("error auto-installing package: manifest hash mismatch");
+AutoDownloadCompleteRequest::Err(err) => {
+println!("error auto-downloading package: {err:?}");
+updates
+.package_updates
+.entry(err.package_id.to_process_lib())
+.or_default()
+.insert(
+err.version_hash.clone(),
+UpdateInfo {
+errors: err.tries,
+pending_manifest_hash: None,
+},
+);
+updates.save();
 }
 }
 }
@@ -1,5 +1,5 @@
-use crate::{utils, VFS_TIMEOUT};
-use kinode_process_lib::{kimap, vfs, PackageId};
+use crate::{kinode::process::downloads::DownloadError, utils, VFS_TIMEOUT};
+use kinode_process_lib::{get_state, kimap, set_state, vfs, PackageId};
 use serde::{Deserialize, Serialize};
 use std::collections::{HashMap, HashSet};
 
@@ -54,9 +54,6 @@ pub struct PackageState {
 /// capabilities have changed. if they have changed, auto-install must fail
 /// and the user must approve the new capabilities.
 pub manifest_hash: Option<String>,
-/// stores the version hash of a failed auto-install attempt, which can be
-/// later installed by the user by approving new caps.
-pub pending_update_hash: Option<String>,
 }
 
 // this seems cleaner to me right now with pending_update_hash, but given how we serialize
@@ -133,7 +130,6 @@ impl State {
 verified: true, // implicitly verified (TODO re-evaluate)
 caps_approved: false, // must re-approve if you want to do something ??
 manifest_hash: Some(manifest_hash),
-pending_update_hash: None, // ... this could be a separate state saved. don't want to reflect this info on-disk as a file.
 },
 );
 
@@ -147,3 +143,76 @@ impl State {
 Ok(())
 }
 }
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+#[serde(transparent)]
+pub struct Updates {
+#[serde(with = "package_id_map")]
+pub package_updates: HashMap<PackageId, HashMap<String, UpdateInfo>>, // package id -> version_hash -> update info
+}
+
+impl Default for Updates {
+fn default() -> Self {
+Self {
+package_updates: HashMap::new(),
+}
+}
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct UpdateInfo {
+pub errors: Vec<(String, DownloadError)>, // errors collected by downloads process
+pub pending_manifest_hash: Option<String>, // pending manifest hash that differed from the installed one
+}
+
+impl Updates {
+pub fn load() -> Self {
+let bytes = get_state();
+
+if let Some(bytes) = bytes {
+serde_json::from_slice(&bytes).unwrap_or_default()
+} else {
+Self::default()
+}
+}
+
+pub fn save(&self) {
+let bytes = serde_json::to_vec(self).unwrap_or_default();
+set_state(&bytes);
+}
+}
+
+// note: serde_json doesn't support non-string keys when serializing maps, so
+// we have to use a custom simple serializer.
+mod package_id_map {
+use super::*;
+use std::{collections::HashMap, str::FromStr};
+
+pub fn serialize<S>(
+map: &HashMap<PackageId, HashMap<String, UpdateInfo>>,
+s: S,
+) -> Result<S::Ok, S::Error>
+where
+S: serde::Serializer,
+{
+use serde::ser::SerializeMap;
+let mut map_ser = s.serialize_map(Some(map.len()))?;
+for (k, v) in map {
+map_ser.serialize_entry(&k.to_string(), v)?;
+}
+map_ser.end()
+}
+
+pub fn deserialize<'de, D>(
+d: D,
+) -> Result<HashMap<PackageId, HashMap<String, UpdateInfo>>, D::Error>
+where
+D: serde::Deserializer<'de>,
+{
+let string_map = HashMap::<String, HashMap<String, UpdateInfo>>::deserialize(d)?;
+Ok(string_map
+.into_iter()
+.filter_map(|(k, v)| PackageId::from_str(&k).ok().map(|pid| (pid, v)))
+.collect())
+}
+}
@@ -225,7 +225,6 @@ pub fn install(
 verified: true, // sideloaded apps are implicitly verified because there is no "source" to verify against
 caps_approved: true, // TODO see if we want to auto-approve local installs
 manifest_hash: Some(manifest_hash),
-pending_update_hash: None, // TODO: doublecheck if problematically overwrites auto_update state.
 };
 
 if let Ok(extracted) = extract_api(&process_package_id) {
@ -11,7 +11,7 @@ alloy-primitives = "0.8.15"
|
|||||||
alloy-sol-types = "0.8.15"
|
alloy-sol-types = "0.8.15"
|
||||||
anyhow = "1.0"
|
anyhow = "1.0"
|
||||||
bincode = "1.3.3"
|
bincode = "1.3.3"
|
||||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
|
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
|
||||||
process_macros = "0.1"
|
process_macros = "0.1"
|
||||||
rand = "0.8"
|
rand = "0.8"
|
||||||
serde = { version = "1.0", features = ["derive"] }
|
serde = { version = "1.0", features = ["derive"] }
|
||||||
|
@@ -33,14 +33,14 @@ use alloy_primitives::keccak256;
 use alloy_sol_types::SolEvent;
 use kinode::process::chain::ChainResponses;
 use kinode_process_lib::{
-await_message, call_init, eth, get_blob, get_state, http, kernel_types as kt, kimap,
-print_to_terminal, println, timer, Address, Message, PackageId, Request, Response,
+await_message, call_init, eth, get_blob, http, kernel_types as kt, kimap, print_to_terminal,
+println,
+sqlite::{self, Sqlite},
+timer, Address, Message, PackageId, Request, Response,
 };
 use serde::{Deserialize, Serialize};
-use std::{
-collections::{HashMap, HashSet},
-str::FromStr,
-};
+use std::collections::HashMap;
+use std::str::FromStr;
 
 wit_bindgen::generate!({
 path: "target/wit",
@@ -63,7 +63,6 @@ const KIMAP_ADDRESS: &str = "0x9CE8cCD2932DC727c70f9ae4f8C2b68E6Abed58C";
 
 const DELAY_MS: u64 = 1_000; // 1s
 
-#[derive(Debug, Serialize, Deserialize)]
 pub struct State {
 /// the kimap helper we are using
 pub kimap: kimap::Kimap,
@@ -71,10 +70,8 @@ pub struct State {
 /// when we boot, we can read logs starting from this block and
 /// rebuild latest state.
 pub last_saved_block: u64,
-/// onchain listings
-pub listings: HashMap<PackageId, PackageListing>,
-/// set of packages that we have published
-pub published: HashSet<PackageId>,
+/// tables: listings: <packade_id, listing>, published: vec<package_id>
+pub db: DB,
 }
 
 /// listing information derived from metadata hash in listing event
@@ -83,10 +80,9 @@ pub struct PackageListing {
 pub tba: eth::Address,
 pub metadata_uri: String,
 pub metadata_hash: String,
-// should this even be optional?
-// relegate to only valid apps maybe?
 pub metadata: Option<kt::Erc721Metadata>,
 pub auto_update: bool,
+pub block: u64,
 }
 
 #[derive(Debug, Serialize, Deserialize, process_macros::SerdeJsonInto)]
@@ -96,18 +92,287 @@ pub enum Req {
 Request(ChainRequests),
 }
 
+pub struct DB {
+inner: Sqlite,
+}
+
+impl DB {
+pub fn connect(our: &Address) -> anyhow::Result<Self> {
+let inner = sqlite::open(our.package_id(), "app_store_chain.sqlite", Some(10))?;
+// create tables
+inner.write(CREATE_META_TABLE.into(), vec![], None)?;
+inner.write(CREATE_LISTINGS_TABLE.into(), vec![], None)?;
+inner.write(CREATE_PUBLISHED_TABLE.into(), vec![], None)?;
+
+Ok(Self { inner })
+}
+
+pub fn reset(&self, our: &Address) {
+if let Err(e) = sqlite::remove_db(our.package_id(), "app_store_chain.sqlite", None) {
+println!("failed to reset app_store DB: {e}");
+}
+}
+
+pub fn get_last_saved_block(&self) -> anyhow::Result<u64> {
+let query = "SELECT value FROM meta WHERE key = 'last_saved_block'";
+let rows = self.inner.read(query.into(), vec![])?;
+if let Some(row) = rows.get(0) {
+if let Some(val_str) = row.get("value").and_then(|v| v.as_str()) {
+if let Ok(block) = val_str.parse::<u64>() {
+return Ok(block);
+}
+}
+}
+Ok(0)
+}
+
+pub fn set_last_saved_block(&self, block: u64) -> anyhow::Result<()> {
+let query = "INSERT INTO meta (key, value) VALUES ('last_saved_block', ?)
+ON CONFLICT(key) DO UPDATE SET value=excluded.value";
+let params = vec![block.to_string().into()];
+self.inner.write(query.into(), params, None)?;
+Ok(())
+}
+
+pub fn insert_or_update_listing(
+&self,
+package_id: &PackageId,
+listing: &PackageListing,
+) -> anyhow::Result<()> {
+let metadata_json = if let Some(m) = &listing.metadata {
+serde_json::to_string(m)?
+} else {
+"".to_string()
+};
+
+let query = "INSERT INTO listings (package_name, publisher_node, tba, metadata_uri, metadata_hash, metadata_json, auto_update, block)
+VALUES (?, ?, ?, ?, ?, ?, ?, ?)
+ON CONFLICT(package_name, publisher_node)
+DO UPDATE SET
+tba=excluded.tba,
+metadata_uri=excluded.metadata_uri,
+metadata_hash=excluded.metadata_hash,
+metadata_json=excluded.metadata_json,
+auto_update=excluded.auto_update,
+block=excluded.block";
+let params = vec![
+package_id.package_name.clone().into(),
+package_id.publisher_node.clone().into(),
+listing.tba.to_string().into(),
+listing.metadata_uri.clone().into(),
+listing.metadata_hash.clone().into(),
+metadata_json.into(),
+(if listing.auto_update { 1 } else { 0 }).into(),
+listing.block.into(),
+];
+
+self.inner.write(query.into(), params, None)?;
+Ok(())
+}
+
+pub fn delete_listing(&self, package_id: &PackageId) -> anyhow::Result<()> {
+let query = "DELETE FROM listings WHERE package_name = ? AND publisher_node = ?";
+let params = vec![
+package_id.package_name.clone().into(),
+package_id.publisher_node.clone().into(),
+];
+self.inner.write(query.into(), params, None)?;
+Ok(())
+}
+
+pub fn get_listing(&self, package_id: &PackageId) -> anyhow::Result<Option<PackageListing>> {
+let query = "SELECT tba, metadata_uri, metadata_hash, metadata_json, auto_update, block FROM listings WHERE package_name = ? AND publisher_node = ?";
+let params = vec![
+package_id.package_name.clone().into(),
+package_id.publisher_node.clone().into(),
+];
+let rows = self.inner.read(query.into(), params)?;
+if let Some(row) = rows.get(0) {
+Ok(Some(self.row_to_listing(row)?))
+} else {
+Ok(None)
+}
+}
+
+pub fn get_all_listings(&self) -> anyhow::Result<Vec<(PackageId, PackageListing)>> {
+let query = "SELECT package_name, publisher_node, tba, metadata_uri, metadata_hash, metadata_json, auto_update, block FROM listings";
+let rows = self.inner.read(query.into(), vec![])?;
+let mut listings = Vec::new();
+for row in rows {
+let pid = PackageId {
+package_name: row["package_name"].as_str().unwrap_or("").to_string(),
+publisher_node: row["publisher_node"].as_str().unwrap_or("").to_string(),
+};
+let listing = self.row_to_listing(&row)?;
+listings.push((pid, listing));
+}
+Ok(listings)
+}
+
+pub fn get_listings_batch(
+&self,
+limit: u64,
+offset: u64,
+) -> anyhow::Result<Vec<(PackageId, PackageListing)>> {
+let query = format!(
+"SELECT package_name, publisher_node, tba, metadata_uri, metadata_hash, metadata_json, auto_update, block
+FROM listings
+ORDER BY package_name, publisher_node
+LIMIT {} OFFSET {}",
+limit, offset
+);
+
+let rows = self.inner.read(query, vec![])?;
+let mut listings = Vec::new();
+for row in rows {
+let pid = PackageId {
+package_name: row["package_name"].as_str().unwrap_or("").to_string(),
+publisher_node: row["publisher_node"].as_str().unwrap_or("").to_string(),
+};
+let listing = self.row_to_listing(&row)?;
+listings.push((pid, listing));
+}
+Ok(listings)
+}
+
+pub fn get_listings_since_block(
+&self,
+block_number: u64,
+) -> anyhow::Result<Vec<(PackageId, PackageListing)>> {
+let query = "SELECT package_name, publisher_node, tba, metadata_uri, metadata_hash, metadata_json, auto_update, block
+FROM listings
+WHERE block > ?";
+let params = vec![block_number.into()];
+let rows = self.inner.read(query.into(), params)?;
+let mut listings = Vec::new();
+for row in rows {
+let pid = PackageId {
+package_name: row["package_name"].as_str().unwrap_or("").to_string(),
+publisher_node: row["publisher_node"].as_str().unwrap_or("").to_string(),
+};
+let listing = self.row_to_listing(&row)?;
+listings.push((pid, listing));
+}
+Ok(listings)
+}
+
+pub fn row_to_listing(
+&self,
+row: &HashMap<String, serde_json::Value>,
+) -> anyhow::Result<PackageListing> {
+let tba_str = row["tba"]
+.as_str()
+.ok_or_else(|| anyhow::anyhow!("Invalid tba"))?;
+let tba = tba_str.parse::<eth::Address>()?;
+let metadata_uri = row["metadata_uri"].as_str().unwrap_or("").to_string();
+let metadata_hash = row["metadata_hash"].as_str().unwrap_or("").to_string();
+let metadata_json = row["metadata_json"].as_str().unwrap_or("");
+let metadata: Option<kinode_process_lib::kernel_types::Erc721Metadata> =
+if metadata_json.is_empty() {
+None
+} else {
+serde_json::from_str(metadata_json)?
+};
+let auto_update = row["auto_update"].as_i64().unwrap_or(0) == 1;
+let block = row["block"].as_i64().unwrap_or(0) as u64;
+
+Ok(PackageListing {
+tba,
+metadata_uri,
+metadata_hash,
+metadata,
+auto_update,
+block,
+})
+}
+
+pub fn get_published(&self, package_id: &PackageId) -> anyhow::Result<bool> {
+let query = "SELECT 1 FROM published WHERE package_name = ? AND publisher_node = ?";
+let params = vec![
+package_id.package_name.clone().into(),
+package_id.publisher_node.clone().into(),
+];
+let rows = self.inner.read(query.into(), params)?;
+Ok(!rows.is_empty())
+}
+
+pub fn insert_published(&self, package_id: &PackageId) -> anyhow::Result<()> {
+let query = "INSERT INTO published (package_name, publisher_node) VALUES (?, ?) ON CONFLICT DO NOTHING";
+let params = vec![
+package_id.package_name.clone().into(),
+package_id.publisher_node.clone().into(),
+];
+self.inner.write(query.into(), params, None)?;
+Ok(())
+}
+
+pub fn delete_published(&self, package_id: &PackageId) -> anyhow::Result<()> {
+let query = "DELETE FROM published WHERE package_name = ? AND publisher_node = ?";
+let params = vec![
+package_id.package_name.clone().into(),
+package_id.publisher_node.clone().into(),
+];
+self.inner.write(query.into(), params, None)?;
+Ok(())
+}
+
+pub fn get_all_published(&self) -> anyhow::Result<Vec<PackageId>> {
+let query = "SELECT package_name, publisher_node FROM published";
+let rows = self.inner.read(query.into(), vec![])?;
+let mut result = Vec::new();
+for row in rows {
+let pid = PackageId {
+package_name: row["package_name"].as_str().unwrap_or("").to_string(),
+publisher_node: row["publisher_node"].as_str().unwrap_or("").to_string(),
+};
+result.push(pid);
+}
+Ok(result)
+}
+}
+
+const CREATE_META_TABLE: &str = "
+CREATE TABLE IF NOT EXISTS meta (
+key TEXT PRIMARY KEY,
+value TEXT
+);";
+
+const CREATE_LISTINGS_TABLE: &str = "
+CREATE TABLE IF NOT EXISTS listings (
+package_name TEXT NOT NULL,
+publisher_node TEXT NOT NULL,
+tba TEXT NOT NULL,
+metadata_uri TEXT,
+metadata_hash TEXT,
+metadata_json TEXT,
+auto_update INTEGER NOT NULL DEFAULT 0,
+block INTEGER NOT NULL DEFAULT 0,
+PRIMARY KEY (package_name, publisher_node)
+);";
+
+const CREATE_PUBLISHED_TABLE: &str = "
+CREATE TABLE IF NOT EXISTS published (
+package_name TEXT NOT NULL,
+publisher_node TEXT NOT NULL,
+PRIMARY KEY (package_name, publisher_node)
+);";
+
 call_init!(init);
 fn init(our: Address) {
-println!(
-"chain started, indexing on contract address {}",
-KIMAP_ADDRESS
-);
-// create new provider with request-timeout of 60s
-// can change, log requests can take quite a long time.
 let eth_provider: eth::Provider = eth::Provider::new(CHAIN_ID, CHAIN_TIMEOUT);
 
-let mut state = fetch_state(eth_provider);
-fetch_and_subscribe_logs(&our, &mut state);
+let db = DB::connect(&our).expect("failed to open DB");
+let kimap_helper =
+kimap::Kimap::new(eth_provider, eth::Address::from_str(KIMAP_ADDRESS).unwrap());
+let last_saved_block = db.get_last_saved_block().unwrap_or(0);
+
+let mut state = State {
+kimap: kimap_helper,
+last_saved_block,
+db,
+};
+
+fetch_and_subscribe_logs(&our, &mut state, last_saved_block);
+
 loop {
 match await_message() {
@@ -126,17 +391,15 @@ fn init(our: Address) {
 fn handle_message(our: &Address, state: &mut State, message: &Message) -> anyhow::Result<()> {
 if !message.is_request() {
 if message.is_local(&our) && message.source().process == "timer:distro:sys" {
-// handling of ETH RPC subscriptions delayed by DELAY_MS
-// to allow kns to have a chance to process block: handle now
 let Some(context) = message.context() else {
-return Err(anyhow::anyhow!("foo"));
+return Err(anyhow::anyhow!("No context in timer message"));
 };
 let log = serde_json::from_slice(context)?;
 handle_eth_log(our, state, log, false)?;
 return Ok(());
 }
 } else {
-match message.body().try_into()? {
+match serde_json::from_slice::<Req>(message.body())? {
 Req::Eth(eth_result) => {
 if !message.is_local(our) || message.source().process != "eth:distro:sys" {
 return Err(anyhow::anyhow!(
@@ -154,7 +417,7 @@ fn handle_message(our: &Address, state: &mut State, message: &Message) -> anyhow
 timer::set_timer(DELAY_MS, Some(serde_json::to_vec(log)?));
 }
 } else {
-// attempt to resubscribe
+// re-subscribe if error
 state
 .kimap
 .provider
@@ -162,7 +425,7 @@ fn handle_message(our: &Address, state: &mut State, message: &Message) -> anyhow
 }
 }
 Req::Request(chains) => {
-handle_local_request(state, chains)?;
+handle_local_request(our, state, chains)?;
 }
 }
 }
@@ -170,51 +433,44 @@
 Ok(())
 }
 
-fn handle_local_request(state: &mut State, req: ChainRequests) -> anyhow::Result<()> {
+fn handle_local_request(
+our: &Address,
+state: &mut State,
+req: ChainRequests,
+) -> anyhow::Result<()> {
 match req {
 ChainRequests::GetApp(package_id) => {
-let onchain_app = state
-.listings
-.get(&package_id.clone().to_process_lib())
-.map(|app| OnchainApp {
-package_id: package_id,
-tba: app.tba.to_string(),
-metadata_uri: app.metadata_uri.clone(),
-metadata_hash: app.metadata_hash.clone(),
-metadata: app.metadata.as_ref().map(|m| m.clone().into()),
-auto_update: app.auto_update,
-});
+let pid = package_id.clone().to_process_lib();
+let listing = state.db.get_listing(&pid)?;
+let onchain_app = listing.map(|app| app.to_onchain_app(&pid));
 let response = ChainResponses::GetApp(onchain_app);
 Response::new().body(&response).send()?;
 }
 ChainRequests::GetApps => {
-let apps: Vec<OnchainApp> = state
-.listings
-.iter()
-.map(|(id, listing)| listing.to_onchain_app(id))
+let listings = state.db.get_all_listings()?;
+let apps: Vec<OnchainApp> = listings
+.into_iter()
+.map(|(pid, listing)| listing.to_onchain_app(&pid))
 .collect();
 
 let response = ChainResponses::GetApps(apps);
 Response::new().body(&response).send()?;
 }
 ChainRequests::GetOurApps => {
-let apps: Vec<OnchainApp> = state
-.published
-.iter()
-.filter_map(|id| {
-state
-.listings
-.get(id)
-.map(|listing| listing.to_onchain_app(id))
-})
-.collect();
+let published_list = state.db.get_all_published()?;
+let mut apps = Vec::new();
+for pid in published_list {
+if let Some(listing) = state.db.get_listing(&pid)? {
+apps.push(listing.to_onchain_app(&pid));
+}
+}
 
 let response = ChainResponses::GetOurApps(apps);
 Response::new().body(&response).send()?;
 }
 ChainRequests::StartAutoUpdate(package_id) => {
-if let Some(listing) = state.listings.get_mut(&package_id.to_process_lib()) {
+let pid = package_id.to_process_lib();
+if let Some(mut listing) = state.db.get_listing(&pid)? {
 listing.auto_update = true;
+state.db.insert_or_update_listing(&pid, &listing)?;
 let response = ChainResponses::AutoUpdateStarted;
 Response::new().body(&response).send()?;
 } else {
@@ -223,8 +479,10 @@ fn handle_local_request(state: &mut State, req: ChainRequests) -> anyhow::Result
 }
 }
 ChainRequests::StopAutoUpdate(package_id) => {
-if let Some(listing) = state.listings.get_mut(&package_id.to_process_lib()) {
+let pid = package_id.to_process_lib();
+if let Some(mut listing) = state.db.get_listing(&pid)? {
 listing.auto_update = false;
+state.db.insert_or_update_listing(&pid, &listing)?;
 let response = ChainResponses::AutoUpdateStopped;
 Response::new().body(&response).send()?;
 } else {
@@ -232,6 +490,11 @@ fn handle_local_request(state: &mut State, req: ChainRequests) -> anyhow::Result
 Response::new().body(&error_response).send()?;
 }
 }
+ChainRequests::Reset => {
+state.db.reset(&our);
+Response::new().body(&ChainResponses::ResetOk).send()?;
+panic!("resetting state, restarting!");
+}
 }
 Ok(())
 }
@@ -258,7 +521,7 @@ fn handle_eth_log(
 if package.is_empty() || publisher.is_empty() {
 Err(anyhow::anyhow!("invalid publisher name"))
 } else {
-Ok(PackageId::new(&package, &publisher))
+Ok(PackageId::new(package, publisher))
 }
 })?;
 
@@ -267,7 +530,7 @@ fn handle_eth_log(
 // at the URI.
 
 let metadata_uri = String::from_utf8_lossy(&note.data).to_string();
-let is_our_package = &package_id.publisher() == &our.node();
+let is_our_package = package_id.publisher() == our.node();
 
 let (tba, metadata_hash) = if !startup {
 // generate ~metadata-hash full-path
@@ -292,10 +555,12 @@ fn handle_eth_log(
 
 match data {
 None => {
-// if ~metadata-uri is also empty, this is an unpublish action!
+// unpublish if metadata_uri empty
 if metadata_uri.is_empty() {
-state.published.remove(&package_id);
-state.listings.remove(&package_id);
+state.db.delete_published(&package_id)?;
+state.db.delete_listing(&package_id)?;
+state.last_saved_block = block_number;
+state.db.set_last_saved_block(block_number)?;
 return Ok(());
 }
 return Err(anyhow::anyhow!(
@@ -309,7 +574,7 @@ fn handle_eth_log(
 };
 
 if is_our_package {
-state.published.insert(package_id.clone());
+state.db.insert_published(&package_id)?;
 }
 
 // if this is a startup event, we don't need to fetch metadata from the URI --
@@ -322,109 +587,158 @@ fn handle_eth_log(
 None
 };
 
-match state.listings.entry(package_id.clone()) {
-std::collections::hash_map::Entry::Occupied(mut listing) => {
-let listing = listing.get_mut();
-listing.metadata_uri = metadata_uri;
-listing.tba = tba;
-listing.metadata_hash = metadata_hash;
-listing.metadata = metadata.clone();
-}
-std::collections::hash_map::Entry::Vacant(listing) => {
-listing.insert(PackageListing {
-tba,
-metadata_uri,
-metadata_hash,
-metadata: metadata.clone(),
-auto_update: false,
-});
-}
+let mut listing = state
+.db
+.get_listing(&package_id)?
+.unwrap_or(PackageListing {
+tba,
+metadata_uri: metadata_uri.clone(),
+metadata_hash: metadata_hash.clone(),
+metadata: metadata.clone(),
+auto_update: false,
+block: block_number,
+});
+// update fields
+listing.tba = tba;
+listing.metadata_uri = metadata_uri;
+listing.metadata_hash = metadata_hash;
+listing.metadata = metadata.clone();
+
+state.db.insert_or_update_listing(&package_id, &listing)?;
+
+if !startup && listing.auto_update {
+println!("kicking off auto-update for: {}", package_id);
+Request::to(("our", "downloads", "app_store", "sys"))
+.body(&DownloadRequests::AutoUpdate(AutoUpdateRequest {
+package_id: crate::kinode::process::main::PackageId::from_process_lib(
+package_id.clone(),
+),
+metadata: metadata.unwrap().into(),
+}))
+.send()
+.unwrap();
 }
 
 if !startup {
-// if auto_update is enabled, send a message to downloads to kick off the update.
-if let Some(listing) = state.listings.get(&package_id) {
-if listing.auto_update {
-print_to_terminal(0, &format!("kicking off auto-update for: {}", package_id));
-Request::to(("our", "downloads", "app-store", "sys"))
-.body(&DownloadRequests::AutoUpdate(AutoUpdateRequest {
-package_id: crate::kinode::process::main::PackageId::from_process_lib(
-package_id,
-),
-metadata: metadata.unwrap().into(),
-}))
-.send()
-.unwrap();
-}
-}
+state.last_saved_block = block_number;
+state.db.set_last_saved_block(block_number)?;
 }
 
-state.last_saved_block = block_number;
 
 Ok(())
 }
 
 /// after startup, fetch metadata for all listings
 /// we do this as a separate step to not repeatedly fetch outdated metadata
 /// as we process logs.
-fn update_all_metadata(state: &mut State) {
-state.listings.retain(|package_id, listing| {
-let (tba, metadata_hash) = {
-// generate ~metadata-hash full-path
-let hash_note = format!(
-"~metadata-hash.{}.{}",
-package_id.package(),
-package_id.publisher()
+fn update_all_metadata(state: &mut State, last_saved_block: u64) {
+let updated_listings = match state.db.get_listings_since_block(last_saved_block) {
+Ok(listings) => listings,
+Err(e) => {
+print_to_terminal(
+1,
+&format!("error fetching updated listings since block {last_saved_block}: {e}"),
 );
-// owner can change which we don't track (yet?) so don't save, need to get when desired
-let Ok((tba, _owner, data)) = (match state.kimap.get(&hash_note) {
-Ok(gr) => Ok(gr),
-Err(e) => match e {
-eth::EthError::RpcError(_) => {
-// retry on RpcError after DELAY_MS sleep
-// sleep here rather than with, e.g., a message to
-// `timer:distro:sys` so that events are processed in
-// order of receipt
-std::thread::sleep(std::time::Duration::from_millis(DELAY_MS));
-state.kimap.get(&hash_note)
+return;
+}
+};
+
+for (pid, mut listing) in updated_listings {
+let hash_note = format!("~metadata-hash.{}.{}", pid.package(), pid.publisher());
+let (tba, metadata_hash) = match state.kimap.get(&hash_note) {
+Ok((t, _o, data)) => {
+match data {
+None => {
+// If metadata_uri empty, unpublish
+if listing.metadata_uri.is_empty() {
+if let Err(e) = state.db.delete_published(&pid) {
+print_to_terminal(1, &format!("error deleting published: {e}"));
+}
+}
+if let Err(e) = state.db.delete_listing(&pid) {
+print_to_terminal(1, &format!("error deleting listing: {e}"));
+}
+continue;
 }
-_ => Err(e),
-},
-}) else {
-return false;
-};
+Some(hash_note) => (t, String::from_utf8_lossy(&hash_note).to_string()),
+}
+}
+Err(e) => {
+// If RpcError, retry once after delay
+if let eth::EthError::RpcError(_) = e {
+std::thread::sleep(std::time::Duration::from_millis(DELAY_MS));
+match state.kimap.get(&hash_note) {
+Ok((t, _o, data)) => {
+if let Some(hash_note) = data {
+(t, String::from_utf8_lossy(&hash_note).to_string())
+} else {
+// no data again after retry
+if listing.metadata_uri.is_empty() {
+if let Err(e) = state.db.delete_published(&pid) {
+print_to_terminal(
+1,
+&format!("error deleting published: {e}"),
+);
+}
+}
+if let Err(e) = state.db.delete_listing(&pid) {
+print_to_terminal(1, &format!("error deleting listing: {e}"));
+}
+continue;
+}
+}
+Err(e2) => {
+print_to_terminal(
+1,
+&format!("error retrieving metadata-hash after retry: {e2:?}"),
+);
+continue;
+}
+}
+} else {
+print_to_terminal(
+1,
+&format!("error retrieving metadata-hash: {e:?} for {pid}"),
+);
+continue;
+}
+}
 
-match data {
-None => {
-// if ~metadata-uri is also empty, this is an unpublish action!
-if listing.metadata_uri.is_empty() {
-state.published.remove(package_id);
-}
-return false;
 }
-Some(hash_note) => (tba, String::from_utf8_lossy(&hash_note).to_string()),
 }
 };
 
+// Update listing fields
 listing.tba = tba;
 listing.metadata_hash = metadata_hash;
 
 let metadata =
-fetch_metadata_from_url(&listing.metadata_uri, &listing.metadata_hash, 30).ok();
+match fetch_metadata_from_url(&listing.metadata_uri, &listing.metadata_hash, 30) {
+Ok(md) => Some(md),
+Err(err) => {
+print_to_terminal(1, &format!("error fetching metadata for {}: {err}", pid));
+None
+}
+};
 listing.metadata = metadata.clone();
-if listing.auto_update {
-print_to_terminal(0, &format!("kicking off auto-update for: {}", package_id));
-Request::to(("our", "downloads", "app-store", "sys"))
-.body(&DownloadRequests::AutoUpdate(AutoUpdateRequest {
-package_id: crate::kinode::process::main::PackageId::from_process_lib(
-package_id.clone(),
-),
-metadata: metadata.unwrap().into(),
-}))
-.send()
-.unwrap();
+if let Err(e) = state.db.insert_or_update_listing(&pid, &listing) {
+print_to_terminal(1, &format!("error updating listing {}: {e}", pid));
 }
 }
true
|
|
||||||
});
|
if listing.auto_update {
|
||||||
|
if let Some(md) = metadata {
|
||||||
|
print_to_terminal(0, &format!("kicking off auto-update for: {}", pid));
|
||||||
|
if let Err(e) = Request::to(("our", "downloads", "app_store", "sys"))
|
||||||
|
.body(&DownloadRequests::AutoUpdate(AutoUpdateRequest {
|
||||||
|
package_id: crate::kinode::process::main::PackageId::from_process_lib(
|
||||||
|
pid.clone(),
|
||||||
|
),
|
||||||
|
metadata: md.into(),
|
||||||
|
}))
|
||||||
|
.send()
|
||||||
|
{
|
||||||
|
print_to_terminal(1, &format!("error sending auto-update request: {e}"));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// create the filter used for app store getLogs and subscription.
|
/// create the filter used for app store getLogs and subscription.
|
||||||
@@ -443,21 +757,25 @@ pub fn app_store_filter(state: &State) -> eth::Filter {
 }
 
 /// create a filter to fetch app store event logs from chain and subscribe to new events
-pub fn fetch_and_subscribe_logs(our: &Address, state: &mut State) {
+pub fn fetch_and_subscribe_logs(our: &Address, state: &mut State, last_saved_block: u64) {
     let filter = app_store_filter(state);
     // get past logs, subscribe to new ones.
     // subscribe first so we don't miss any logs
-    println!("subscribing...");
     state.kimap.provider.subscribe_loop(1, filter.clone());
-    for log in fetch_logs(
-        &state.kimap.provider,
-        &filter.from_block(state.last_saved_block),
-    ) {
+    // println!("fetching old logs from block {last_saved_block}");
+    for log in fetch_logs(&state.kimap.provider, &filter.from_block(last_saved_block)) {
         if let Err(e) = handle_eth_log(our, state, log, true) {
             print_to_terminal(1, &format!("error ingesting log: {e}"));
         };
     }
-    update_all_metadata(state);
+    update_all_metadata(state, last_saved_block);
+    // save updated last_saved_block
+    if let Ok(block_number) = state.kimap.provider.get_block_number() {
+        state.last_saved_block = block_number;
+        state.db.set_last_saved_block(block_number).unwrap();
+    }
+    // println!("up to date to block {}", state.last_saved_block);
 }
 
 /// fetch logs from the chain with a given filter
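
The hunk above leans on ordering: the subscription is opened before the historical fetch, so no log can slip between the backfill from `last_saved_block` and the live stream. A minimal standalone Rust sketch of that pattern, using plain integers in place of chain logs (an illustration only, not the kinode API):

    use std::collections::BTreeSet;

    fn main() {
        let last_saved_block = 5u64;
        let mut seen = BTreeSet::new();
        // 1. "subscribe" first: anything from now on lands in `live`.
        let live: Vec<u64> = (8..=10).collect();
        // 2. backfill history from the saved checkpoint...
        for block in last_saved_block..8 {
            seen.insert(block); // idempotent handling
        }
        // 3. ...then drain the live feed; overlap is harmless because handling is idempotent.
        for block in live {
            seen.insert(block);
        }
        assert_eq!(seen.len(), 6); // blocks 5..=10, no gaps
    }
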
@@ -506,32 +824,6 @@ pub fn keccak_256_hash(bytes: &[u8]) -> String {
     format!("0x{:x}", hasher.finalize())
 }
 
-/// fetch state from disk or create a new one if that fails
-pub fn fetch_state(provider: eth::Provider) -> State {
-    if let Some(state_bytes) = get_state() {
-        match serde_json::from_slice::<State>(&state_bytes) {
-            Ok(state) => {
-                if state.kimap.address().to_string() == KIMAP_ADDRESS {
-                    return state;
-                } else {
-                    println!(
-                        "state contract address mismatch. rebuilding state! expected {}, got {}",
-                        KIMAP_ADDRESS,
-                        state.kimap.address().to_string()
-                    );
-                }
-            }
-            Err(e) => println!("failed to deserialize saved state, rebuilding: {e}"),
-        }
-    }
-    State {
-        kimap: kimap::Kimap::new(provider, eth::Address::from_str(KIMAP_ADDRESS).unwrap()),
-        last_saved_block: 0,
-        listings: HashMap::new(),
-        published: HashSet::new(),
-    }
-}
-
 // quite annoyingly, we must convert from our gen'd version of PackageId
 // to the process_lib's gen'd version. this is in order to access custom
 // Impls that we want to use
@@ -8,7 +8,7 @@ simulation-mode = []
 
 [dependencies]
 anyhow = "1.0"
-kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
+kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
 process_macros = "0.1"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
@@ -8,7 +8,7 @@ simulation-mode = []
 
 [dependencies]
 anyhow = "1.0"
-kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
+kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
 process_macros = "0.1"
 rand = "0.8"
 serde = { version = "1.0", features = ["derive"] }
@@ -42,13 +42,18 @@
 //! mechanism is implemented in the FT worker for improved modularity and performance.
 //!
 use crate::kinode::process::downloads::{
-    AutoDownloadCompleteRequest, AutoUpdateRequest, DirEntry, DownloadCompleteRequest,
-    DownloadError, DownloadRequests, DownloadResponses, Entry, FileEntry, HashMismatch,
-    LocalDownloadRequest, RemoteDownloadRequest, RemoveFileRequest,
+    AutoDownloadCompleteRequest, AutoDownloadError, AutoUpdateRequest, DirEntry,
+    DownloadCompleteRequest, DownloadError, DownloadRequests, DownloadResponses, Entry, FileEntry,
+    HashMismatch, LocalDownloadRequest, RemoteDownloadRequest, RemoveFileRequest,
+};
+use std::{
+    collections::{HashMap, HashSet},
+    io::Read,
+    str::FromStr,
 };
-use std::{collections::HashSet, io::Read, str::FromStr};
 
 use ft_worker_lib::{spawn_receive_transfer, spawn_send_transfer};
+use kinode::process::downloads::AutoDownloadSuccess;
 use kinode_process_lib::{
     await_message, call_init, get_blob, get_state,
     http::client,
@@ -69,7 +74,6 @@ wit_bindgen::generate!({
 mod ft_worker_lib;
 
 pub const VFS_TIMEOUT: u64 = 5; // 5s
-pub const APP_SHARE_TIMEOUT: u64 = 120; // 120s
 
 #[derive(Debug, Serialize, Deserialize, process_macros::SerdeJsonInto)]
 #[serde(untagged)] // untagged as a meta-type for all incoming responses
@@ -78,6 +82,15 @@ pub enum Resp {
     HttpClient(Result<client::HttpClientResponse, client::HttpClientError>),
 }
 
+#[derive(Debug, Serialize, Deserialize, Clone)]
+pub struct AutoUpdateStatus {
+    mirrors_left: HashSet<String>, // set(node/url)
+    mirrors_failed: Vec<(String, DownloadError)>, // vec(node/url, error)
+    active_mirror: String, // (node/url)
+}
+
+type AutoUpdates = HashMap<(PackageId, String), AutoUpdateStatus>;
+
 #[derive(Debug, Serialize, Deserialize)]
 pub struct State {
     // persisted metadata about which packages we are mirroring
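
The map above keys each in-flight auto-update by (package id, desired version hash). A self-contained sketch of how that bookkeeping behaves when a mirror fails, with `String` standing in for the process_lib `PackageId` and for `DownloadError` (assumptions for illustration only):

    use std::collections::{HashMap, HashSet};

    struct Status {
        mirrors_left: HashSet<String>,
        mirrors_failed: Vec<(String, String)>, // (mirror, error)
        active_mirror: String,
    }

    fn main() {
        let mut auto_updates: HashMap<(String, String), Status> = HashMap::new();
        let key = ("app:publisher.os".to_string(), "abc123".to_string());
        auto_updates.insert(
            key.clone(),
            Status {
                mirrors_left: ["mirror1.os", "publisher.os"].iter().map(|s| s.to_string()).collect(),
                mirrors_failed: Vec::new(),
                active_mirror: "mirror1.os".to_string(),
            },
        );

        // On a download error: record the failure, drop the mirror, move to the next one (if any).
        if let Some(mut st) = auto_updates.remove(&key) {
            st.mirrors_failed.push((st.active_mirror.clone(), "timeout".to_string()));
            st.mirrors_left.remove(&st.active_mirror);
            if let Some(next) = st.mirrors_left.iter().next().cloned() {
                st.active_mirror = next;
                auto_updates.insert(key, st); // still in flight, now on the next mirror
            } // else: nothing left to try; the entry stays removed and a failure report goes out
        }
    }
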
@ -117,13 +130,11 @@ fn init(our: Address) {
|
|||||||
let mut tmp =
|
let mut tmp =
|
||||||
vfs::open_dir("/app-store:sys/downloads/tmp", true, None).expect("could not open tmp");
|
vfs::open_dir("/app-store:sys/downloads/tmp", true, None).expect("could not open tmp");
|
||||||
|
|
||||||
let mut auto_updates: HashSet<(PackageId, String)> = HashSet::new();
|
// metadata for in-flight auto-updates
|
||||||
|
let mut auto_updates: AutoUpdates = HashMap::new();
|
||||||
|
|
||||||
loop {
|
loop {
|
||||||
match await_message() {
|
match await_message() {
|
||||||
Err(send_error) => {
|
|
||||||
print_to_terminal(1, &format!("downloads: got network error: {send_error}"));
|
|
||||||
}
|
|
||||||
Ok(message) => {
|
Ok(message) => {
|
||||||
if let Err(e) = handle_message(
|
if let Err(e) = handle_message(
|
||||||
&our,
|
&our,
|
||||||
@ -143,6 +154,33 @@ fn init(our: Address) {
|
|||||||
.unwrap();
|
.unwrap();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
Err(send_error) => {
|
||||||
|
print_to_terminal(1, &format!("downloads: got network error: {send_error}"));
|
||||||
|
if let Some(context) = &send_error.context {
|
||||||
|
if let Ok(download_request) =
|
||||||
|
serde_json::from_slice::<LocalDownloadRequest>(&context)
|
||||||
|
{
|
||||||
|
let key = (
|
||||||
|
download_request.package_id.to_process_lib(),
|
||||||
|
download_request.desired_version_hash.clone(),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Get the error first
|
||||||
|
let error = if send_error.kind.is_timeout() {
|
||||||
|
DownloadError::Timeout
|
||||||
|
} else if send_error.kind.is_offline() {
|
||||||
|
DownloadError::Offline
|
||||||
|
} else {
|
||||||
|
DownloadError::HandlingError(send_error.to_string())
|
||||||
|
};
|
||||||
|
|
||||||
|
// Then remove and get metadata
|
||||||
|
if let Some(metadata) = auto_updates.remove(&key) {
|
||||||
|
try_next_mirror(metadata, key, &mut auto_updates, error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -157,7 +195,7 @@ fn handle_message(
|
|||||||
message: &Message,
|
message: &Message,
|
||||||
downloads: &mut Directory,
|
downloads: &mut Directory,
|
||||||
_tmp: &mut Directory,
|
_tmp: &mut Directory,
|
||||||
auto_updates: &mut HashSet<(PackageId, String)>,
|
auto_updates: &mut AutoUpdates,
|
||||||
) -> anyhow::Result<()> {
|
) -> anyhow::Result<()> {
|
||||||
if message.is_request() {
|
if message.is_request() {
|
||||||
match message.body().try_into()? {
|
match message.body().try_into()? {
|
||||||
@ -174,8 +212,12 @@ fn handle_message(
|
|||||||
} = download_request.clone();
|
} = download_request.clone();
|
||||||
|
|
||||||
if download_from.starts_with("http") {
|
if download_from.starts_with("http") {
|
||||||
// use http-client to GET it
|
// use http_client to GET it
|
||||||
Request::to(("our", "http-client", "distro", "sys"))
|
print_to_terminal(
|
||||||
|
1,
|
||||||
|
"kicking off http download for {package_id:?} and {version_hash:?}",
|
||||||
|
);
|
||||||
|
Request::to(("our", "http_client", "distro", "sys"))
|
||||||
.body(
|
.body(
|
||||||
serde_json::to_vec(&client::HttpClientAction::Http(
|
serde_json::to_vec(&client::HttpClientAction::Http(
|
||||||
client::OutgoingHttpRequest {
|
client::OutgoingHttpRequest {
|
||||||
@ -200,7 +242,6 @@ fn handle_message(
|
|||||||
&package_id,
|
&package_id,
|
||||||
&desired_version_hash,
|
&desired_version_hash,
|
||||||
&download_from,
|
&download_from,
|
||||||
APP_SHARE_TIMEOUT,
|
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
Request::to((&download_from, "downloads", "app-store", "sys"))
|
Request::to((&download_from, "downloads", "app-store", "sys"))
|
||||||
@ -236,13 +277,8 @@ fn handle_message(
|
|||||||
}
|
}
|
||||||
|
|
||||||
let target_worker = Address::from_str(&worker_address)?;
|
let target_worker = Address::from_str(&worker_address)?;
|
||||||
let _ = spawn_send_transfer(
|
let _ =
|
||||||
our,
|
spawn_send_transfer(our, &package_id, &desired_version_hash, &target_worker)?;
|
||||||
&package_id,
|
|
||||||
&desired_version_hash,
|
|
||||||
APP_SHARE_TIMEOUT,
|
|
||||||
&target_worker,
|
|
||||||
)?;
|
|
||||||
let resp = DownloadResponses::Success;
|
let resp = DownloadResponses::Success;
|
||||||
Response::new().body(&resp).send()?;
|
Response::new().body(&resp).send()?;
|
||||||
}
|
}
|
||||||
@ -257,50 +293,30 @@ fn handle_message(
|
|||||||
if !message.is_local(our) {
|
if !message.is_local(our) {
|
||||||
return Err(anyhow::anyhow!("got non local download complete"));
|
return Err(anyhow::anyhow!("got non local download complete"));
|
||||||
}
|
}
|
||||||
// if we have a pending auto_install, forward that context to the main process.
|
|
||||||
// it will check if the caps_hashes match (no change in capabilities), and auto_install if it does.
|
|
||||||
|
|
||||||
let manifest_hash = if auto_updates.remove(&(
|
// forward to main:app_store:sys, pushed to UI via websockets
|
||||||
req.package_id.clone().to_process_lib(),
|
Request::to(("our", "main", "app_store", "sys"))
|
||||||
req.version_hash.clone(),
|
|
||||||
)) {
|
|
||||||
match get_manifest_hash(
|
|
||||||
req.package_id.clone().to_process_lib(),
|
|
||||||
req.version_hash.clone(),
|
|
||||||
) {
|
|
||||||
Ok(manifest_hash) => Some(manifest_hash),
|
|
||||||
Err(e) => {
|
|
||||||
print_to_terminal(
|
|
||||||
1,
|
|
||||||
&format!("auto_update: error getting manifest hash: {:?}", e),
|
|
||||||
);
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
};
|
|
||||||
|
|
||||||
// pushed to UI via websockets
|
|
||||||
Request::to(("our", "main", "app-store", "sys"))
|
|
||||||
.body(serde_json::to_vec(&req)?)
|
.body(serde_json::to_vec(&req)?)
|
||||||
.send()?;
|
.send()?;
|
||||||
|
|
||||||
// trigger auto-update install trigger to main:app-store:sys
|
// Check if this is an auto-update download
|
||||||
if let Some(manifest_hash) = manifest_hash {
|
let key = (
|
||||||
let auto_download_complete_req = AutoDownloadCompleteRequest {
|
req.package_id.clone().to_process_lib(),
|
||||||
download_info: req.clone(),
|
req.version_hash.clone(),
|
||||||
manifest_hash,
|
);
|
||||||
};
|
|
||||||
print_to_terminal(
|
if let Some(metadata) = auto_updates.remove(&key) {
|
||||||
1,
|
if let Some(err) = req.err {
|
||||||
&format!(
|
try_next_mirror(metadata, key, auto_updates, err);
|
||||||
"auto_update download complete: triggering install on main:app-store:sys"
|
} else if let Err(_e) = handle_auto_update_success(key.0.clone(), key.1.clone())
|
||||||
),
|
{
|
||||||
);
|
try_next_mirror(
|
||||||
Request::to(("our", "main", "app-store", "sys"))
|
metadata,
|
||||||
.body(serde_json::to_vec(&auto_download_complete_req)?)
|
key,
|
||||||
.send()?;
|
auto_updates,
|
||||||
|
DownloadError::InvalidManifest,
|
||||||
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
DownloadRequests::GetFiles(maybe_id) => {
|
DownloadRequests::GetFiles(maybe_id) => {
|
||||||
@ -414,29 +430,61 @@ fn handle_message(
|
|||||||
} = auto_update_request.clone();
|
} = auto_update_request.clone();
|
||||||
let process_lib_package_id = package_id.clone().to_process_lib();
|
let process_lib_package_id = package_id.clone().to_process_lib();
|
||||||
|
|
||||||
// default auto_update to publisher. TODO: more config here.
|
// default auto_update to publisher
|
||||||
let download_from = metadata.properties.publisher;
|
// let download_from = metadata.properties.publisher.clone();
|
||||||
let current_version = metadata.properties.current_version;
|
let current_version = metadata.properties.current_version;
|
||||||
let code_hashes = metadata.properties.code_hashes;
|
let code_hashes = metadata.properties.code_hashes;
|
||||||
|
|
||||||
|
// Create a HashSet of mirrors including the publisher
|
||||||
|
let mut mirrors = HashSet::new();
|
||||||
|
|
||||||
|
let download_from = if let Some(first_mirror) = metadata.properties.mirrors.first()
|
||||||
|
{
|
||||||
|
first_mirror.clone()
|
||||||
|
} else {
|
||||||
|
"randomnode111.os".to_string()
|
||||||
|
};
|
||||||
|
println!("first_download_from: {download_from}");
|
||||||
|
mirrors.extend(metadata.properties.mirrors.into_iter());
|
||||||
|
mirrors.insert(metadata.properties.publisher.clone());
|
||||||
|
|
||||||
let version_hash = code_hashes
|
let version_hash = code_hashes
|
||||||
.iter()
|
.iter()
|
||||||
.find(|(version, _)| version == ¤t_version)
|
.find(|(version, _)| version == ¤t_version)
|
||||||
.map(|(_, hash)| hash.clone())
|
.map(|(_, hash)| hash.clone())
|
||||||
.ok_or_else(|| anyhow::anyhow!("auto_update: error for package_id: {}, current_version: {}, no matching hash found", process_lib_package_id.to_string(), current_version))?;
|
// note, if this errors, full on failure I thnk no?
|
||||||
|
// and bubble this up.
|
||||||
|
.ok_or_else(|| anyhow::anyhow!("auto_update: error for package_id: {}, current_version: {}, no matching hash found", process_lib_package_id.to_string(), current_version))?;
|
||||||
|
|
||||||
|
print_to_terminal(
|
||||||
|
1,
|
||||||
|
&format!(
|
||||||
|
"auto_update: kicking off download for {:?} from {} with version {} from mirror {}",
|
||||||
|
package_id, download_from, version_hash, download_from
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
let download_request = LocalDownloadRequest {
|
let download_request = LocalDownloadRequest {
|
||||||
package_id,
|
package_id,
|
||||||
download_from,
|
download_from: download_from.clone(),
|
||||||
desired_version_hash: version_hash.clone(),
|
desired_version_hash: version_hash.clone(),
|
||||||
};
|
};
|
||||||
|
|
||||||
// kick off local download to ourselves.
|
// Initialize auto-update status with mirrors
|
||||||
Request::to(("our", "downloads", "app-store", "sys"))
|
let key = (process_lib_package_id.clone(), version_hash.clone());
|
||||||
|
auto_updates.insert(
|
||||||
|
key,
|
||||||
|
AutoUpdateStatus {
|
||||||
|
mirrors_left: mirrors,
|
||||||
|
mirrors_failed: Vec::new(),
|
||||||
|
active_mirror: download_from.clone(),
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
// kick off local download to ourselves
|
||||||
|
Request::to(("our", "downloads", "app_store", "sys"))
|
||||||
.body(DownloadRequests::LocalDownload(download_request))
|
.body(DownloadRequests::LocalDownload(download_request))
|
||||||
.send()?;
|
.send()?;
|
||||||
|
|
||||||
auto_updates.insert((process_lib_package_id, version_hash));
|
|
||||||
}
|
}
|
||||||
_ => {}
|
_ => {}
|
||||||
}
|
}
|
||||||
@ -445,18 +493,30 @@ fn handle_message(
|
|||||||
Resp::Download(download_response) => {
|
Resp::Download(download_response) => {
|
||||||
// get context of the response.
|
// get context of the response.
|
||||||
// handled are errors or ok responses from a remote node.
|
// handled are errors or ok responses from a remote node.
|
||||||
|
// check state, do action based on that!
|
||||||
|
|
||||||
if let Some(context) = message.context() {
|
if let Some(context) = message.context() {
|
||||||
let download_request = serde_json::from_slice::<LocalDownloadRequest>(context)?;
|
let download_request = serde_json::from_slice::<LocalDownloadRequest>(context)?;
|
||||||
match download_response {
|
match download_response {
|
||||||
DownloadResponses::Err(e) => {
|
DownloadResponses::Err(e) => {
|
||||||
Request::to(("our", "main", "app_store", "sys"))
|
print_to_terminal(1, &format!("downloads: got error response: {e:?}"));
|
||||||
.body(DownloadCompleteRequest {
|
let key = (
|
||||||
package_id: download_request.package_id.clone(),
|
download_request.package_id.clone().to_process_lib(),
|
||||||
version_hash: download_request.desired_version_hash.clone(),
|
download_request.desired_version_hash.clone(),
|
||||||
err: Some(e),
|
);
|
||||||
})
|
|
||||||
.send()?;
|
if let Some(metadata) = auto_updates.remove(&key) {
|
||||||
|
try_next_mirror(metadata, key, auto_updates, e);
|
||||||
|
} else {
|
||||||
|
// If not an auto-update, forward error normally
|
||||||
|
Request::to(("our", "main", "app_store", "sys"))
|
||||||
|
.body(DownloadCompleteRequest {
|
||||||
|
package_id: download_request.package_id,
|
||||||
|
version_hash: download_request.desired_version_hash,
|
||||||
|
err: Some(e),
|
||||||
|
})
|
||||||
|
.send()?;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
DownloadResponses::Success => {
|
DownloadResponses::Success => {
|
||||||
// todo: maybe we do something here.
|
// todo: maybe we do something here.
|
||||||
@ -477,29 +537,85 @@ fn handle_message(
|
|||||||
return Err(anyhow::anyhow!("http-client response without context"));
|
return Err(anyhow::anyhow!("http-client response without context"));
|
||||||
};
|
};
|
||||||
let download_request = serde_json::from_slice::<LocalDownloadRequest>(context)?;
|
let download_request = serde_json::from_slice::<LocalDownloadRequest>(context)?;
|
||||||
if let Ok(client::HttpClientResponse::Http(client::HttpResponse {
|
let key = (
|
||||||
status, ..
|
download_request.package_id.clone().to_process_lib(),
|
||||||
})) = resp
|
download_request.desired_version_hash.clone(),
|
||||||
{
|
);
|
||||||
if status == 200 {
|
|
||||||
if let Err(e) = handle_receive_http_download(&download_request) {
|
// Check if this is an auto-update request
|
||||||
print_to_terminal(
|
let is_auto_update = auto_updates.contains_key(&key);
|
||||||
1,
|
let metadata = if is_auto_update {
|
||||||
&format!("error handling http-client response: {:?}", e),
|
auto_updates.remove(&key)
|
||||||
);
|
} else {
|
||||||
Request::to(("our", "main", "app-store", "sys"))
|
None
|
||||||
.body(DownloadRequests::DownloadComplete(
|
};
|
||||||
DownloadCompleteRequest {
|
|
||||||
package_id: download_request.package_id.clone(),
|
// Handle any non-200 response or client error
|
||||||
version_hash: download_request.desired_version_hash.clone(),
|
let Ok(client::HttpClientResponse::Http(resp)) = resp else {
|
||||||
err: Some(e),
|
if let Some(meta) = metadata {
|
||||||
},
|
let error = if let Err(e) = resp {
|
||||||
))
|
format!("HTTP client error: {e:?}")
|
||||||
.send()?;
|
} else {
|
||||||
|
"unexpected response type".to_string()
|
||||||
|
};
|
||||||
|
try_next_mirror(
|
||||||
|
meta,
|
||||||
|
key,
|
||||||
|
auto_updates,
|
||||||
|
DownloadError::HandlingError(error),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return Ok(());
|
||||||
|
};
|
||||||
|
|
||||||
|
if resp.status != 200 {
|
||||||
|
let error =
|
||||||
|
DownloadError::HandlingError(format!("HTTP status {}", resp.status));
|
||||||
|
handle_download_error(
|
||||||
|
is_auto_update,
|
||||||
|
metadata,
|
||||||
|
key,
|
||||||
|
auto_updates,
|
||||||
|
error,
|
||||||
|
&download_request,
|
||||||
|
)?;
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle successful download
|
||||||
|
if let Err(e) = handle_receive_http_download(&download_request) {
|
||||||
|
print_to_terminal(1, &format!("error handling http_client response: {:?}", e));
|
||||||
|
handle_download_error(
|
||||||
|
is_auto_update,
|
||||||
|
metadata,
|
||||||
|
key,
|
||||||
|
auto_updates,
|
||||||
|
e,
|
||||||
|
&download_request,
|
||||||
|
)?;
|
||||||
|
} else if is_auto_update {
|
||||||
|
match handle_auto_update_success(key.0.clone(), key.1.clone()) {
|
||||||
|
Ok(_) => print_to_terminal(
|
||||||
|
1,
|
||||||
|
&format!(
|
||||||
|
"auto_update: successfully downloaded package {:?} version {}",
|
||||||
|
&download_request.package_id,
|
||||||
|
&download_request.desired_version_hash
|
||||||
|
),
|
||||||
|
),
|
||||||
|
Err(_) => {
|
||||||
|
if let Some(meta) = metadata {
|
||||||
|
try_next_mirror(
|
||||||
|
meta,
|
||||||
|
key,
|
||||||
|
auto_updates,
|
||||||
|
DownloadError::HandlingError(
|
||||||
|
"could not get manifest hash".to_string(),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
|
||||||
println!("got http-client error: {resp:?}");
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -507,6 +623,70 @@ fn handle_message(
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Try the next available mirror for a download, recording the current mirror's failure
|
||||||
|
fn try_next_mirror(
|
||||||
|
mut metadata: AutoUpdateStatus,
|
||||||
|
key: (PackageId, String),
|
||||||
|
auto_updates: &mut AutoUpdates,
|
||||||
|
error: DownloadError,
|
||||||
|
) {
|
||||||
|
print_to_terminal(
|
||||||
|
1,
|
||||||
|
&format!(
|
||||||
|
"auto_update: got error from mirror {mirror:?} {error:?}, trying next mirror: {next_mirror:?}",
|
||||||
|
next_mirror = metadata.mirrors_left.iter().next().cloned(),
|
||||||
|
mirror = metadata.active_mirror,
|
||||||
|
error = error
|
||||||
|
),
|
||||||
|
);
|
||||||
|
// Record failure and remove from available mirrors
|
||||||
|
metadata
|
||||||
|
.mirrors_failed
|
||||||
|
.push((metadata.active_mirror.clone(), error));
|
||||||
|
metadata.mirrors_left.remove(&metadata.active_mirror);
|
||||||
|
|
||||||
|
let (package_id, version_hash) = key.clone();
|
||||||
|
|
||||||
|
match metadata.mirrors_left.iter().next().cloned() {
|
||||||
|
Some(next_mirror) => {
|
||||||
|
metadata.active_mirror = next_mirror.clone();
|
||||||
|
auto_updates.insert(key, metadata);
|
||||||
|
Request::to(("our", "downloads", "app_store", "sys"))
|
||||||
|
.body(
|
||||||
|
serde_json::to_vec(&DownloadRequests::LocalDownload(LocalDownloadRequest {
|
||||||
|
package_id: crate::kinode::process::main::PackageId::from_process_lib(
|
||||||
|
package_id,
|
||||||
|
),
|
||||||
|
download_from: next_mirror,
|
||||||
|
desired_version_hash: version_hash.clone(),
|
||||||
|
}))
|
||||||
|
.unwrap(),
|
||||||
|
)
|
||||||
|
.send()
|
||||||
|
.unwrap();
|
||||||
|
}
|
||||||
|
None => {
|
||||||
|
print_to_terminal(
|
||||||
|
1,
|
||||||
|
"auto_update: no more mirrors to try for package_id {package_id:?}",
|
||||||
|
);
|
||||||
|
// gather, and send error to main.
|
||||||
|
let node_tries = metadata.mirrors_failed;
|
||||||
|
let auto_download_error = AutoDownloadCompleteRequest::Err(AutoDownloadError {
|
||||||
|
package_id: crate::kinode::process::main::PackageId::from_process_lib(package_id),
|
||||||
|
version_hash,
|
||||||
|
tries: node_tries,
|
||||||
|
});
|
||||||
|
|
||||||
|
Request::to(("our", "main", "app_store", "sys"))
|
||||||
|
.body(auto_download_error)
|
||||||
|
.send()
|
||||||
|
.unwrap();
|
||||||
|
auto_updates.remove(&key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
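When every mirror has been tried, the accumulated (mirror, error) pairs become the `tries` field of the failure report sent back to main. A simplified stand-in for that terminal path, with plain strings replacing the generated request and error types (assumptions for illustration only):

    // Sketch: turn the per-mirror failure log into a single report string.
    fn report(package: &str, version: &str, tries: Vec<(String, String)>) -> String {
        format!(
            "auto-update of {package}@{version} failed after {} mirrors: {:?}",
            tries.len(),
            tries
        )
    }

    fn main() {
        let tries = vec![
            ("mirror1.os".to_string(), "Timeout".to_string()),
            ("publisher.os".to_string(), "Offline".to_string()),
        ];
        println!("{}", report("app:publisher.os", "abc123", tries));
    }
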
 fn handle_receive_http_download(
     download_request: &LocalDownloadRequest,
 ) -> anyhow::Result<(), DownloadError> {
@@ -558,6 +738,46 @@ fn handle_receive_http_download(
     Ok(())
 }
 
+fn handle_download_error(
+    is_auto_update: bool,
+    metadata: Option<AutoUpdateStatus>,
+    key: (PackageId, String),
+    auto_updates: &mut AutoUpdates,
+    error: impl Into<DownloadError>,
+    download_request: &LocalDownloadRequest,
+) -> anyhow::Result<()> {
+    let error = error.into();
+    if is_auto_update {
+        if let Some(meta) = metadata {
+            try_next_mirror(meta, key, auto_updates, error);
+        }
+    } else {
+        Request::to(("our", "main", "app_store", "sys"))
+            .body(DownloadCompleteRequest {
+                package_id: download_request.package_id.clone(),
+                version_hash: download_request.desired_version_hash.clone(),
+                err: Some(error),
+            })
+            .send()?;
+    }
+    Ok(())
+}
+
+/// Handle auto-update success case by getting manifest hash and sending completion message
+fn handle_auto_update_success(package_id: PackageId, version_hash: String) -> anyhow::Result<()> {
+    let manifest_hash = get_manifest_hash(package_id.clone(), version_hash.clone())?;
+
+    Request::to(("our", "main", "app_store", "sys"))
+        .body(AutoDownloadCompleteRequest::Success(AutoDownloadSuccess {
+            package_id: crate::kinode::process::main::PackageId::from_process_lib(package_id),
+            version_hash,
+            manifest_hash,
+        }))
+        .send()
+        .unwrap();
+    Ok(())
+}
+
 fn format_entries(entries: Vec<vfs::DirEntry>, state: &State) -> Vec<Entry> {
     entries
         .into_iter()
@@ -9,7 +9,7 @@ simulation-mode = []
 [dependencies]
 anyhow = "1.0"
 bincode = "1.3.3"
-kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
+kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
 process_macros = "0.1"
 rand = "0.8"
 serde = { version = "1.0", features = ["derive"] }
@@ -17,7 +17,6 @@ pub fn spawn_send_transfer(
     our: &Address,
     package_id: &PackageId,
     version_hash: &str,
-    timeout: u64,
     to_addr: &Address,
 ) -> anyhow::Result<()> {
     let transfer_id: u64 = rand::random();
@@ -33,17 +32,14 @@ pub fn spawn_send_transfer(
         return Err(anyhow::anyhow!("failed to spawn ft-worker!"));
     };
 
-    let req = Request::new()
-        .target((&our.node, worker_process_id))
-        .expects_response(timeout + 1)
-        .body(
-            serde_json::to_vec(&DownloadRequests::RemoteDownload(RemoteDownloadRequest {
-                package_id: package_id.clone(),
-                desired_version_hash: version_hash.to_string(),
-                worker_address: to_addr.to_string(),
-            }))
-            .unwrap(),
-        );
+    let req = Request::new().target((&our.node, worker_process_id)).body(
+        serde_json::to_vec(&DownloadRequests::RemoteDownload(RemoteDownloadRequest {
+            package_id: package_id.clone(),
+            desired_version_hash: version_hash.to_string(),
+            worker_address: to_addr.to_string(),
+        }))
+        .unwrap(),
+    );
     req.send()?;
     Ok(())
 }
@@ -58,7 +54,6 @@ pub fn spawn_receive_transfer(
     package_id: &PackageId,
     version_hash: &str,
     from_node: &str,
-    timeout: u64,
 ) -> anyhow::Result<Address> {
     let transfer_id: u64 = rand::random();
     let timer_id = ProcessId::new(Some("timer"), "distro", "sys");
@@ -75,7 +70,6 @@ pub fn spawn_receive_transfer(
 
     let req = Request::new()
         .target((&our.node, worker_process_id.clone()))
-        .expects_response(timeout + 1)
         .body(
             serde_json::to_vec(&DownloadRequests::LocalDownload(LocalDownloadRequest {
                 package_id: package_id.clone(),
@@ -29,6 +29,7 @@
 //!
 //! - Hash mismatches between the received file and the expected hash are detected and reported.
 //! - Various I/O errors are caught and propagated.
+//! - A 120 second killswitch is implemented to clean up dangling transfers.
 //!
 //! ## Integration with App Store:
 //!
@@ -61,6 +62,7 @@ wit_bindgen::generate!({
 });
 
 const CHUNK_SIZE: u64 = 262144; // 256KB
+const KILL_SWITCH_MS: u64 = 120000; // 2 minutes
 
 call_init!(init);
 fn init(our: Address) {
@@ -78,8 +80,7 @@ fn init(our: Address) {
     }
 
     // killswitch timer, 2 minutes. sender or receiver gets killed/cleaned up.
-    // TODO: killswitch update bubbles up to downloads process?
-    timer::set_timer(120000, None);
+    timer::set_timer(KILL_SWITCH_MS, None);
 
     let start = std::time::Instant::now();
 
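The killswitch above bounds every transfer at two minutes. As a rough standalone illustration of the deadline pattern, using `std::time` in place of a `timer:distro:sys` message (an assumption for illustration, not the worker's actual API):

    use std::time::{Duration, Instant};

    // Stop accepting chunks once the deadline passes; the real worker receives this as a
    // timer message and reports a timeout error to its parent process instead.
    fn run_transfer(
        chunks: impl Iterator<Item = Vec<u8>>,
        kill_switch: Duration,
    ) -> Result<usize, &'static str> {
        let deadline = Instant::now() + kill_switch;
        let mut received = 0;
        for chunk in chunks {
            if Instant::now() >= deadline {
                return Err("transfer timed out");
            }
            received += chunk.len();
        }
        Ok(received)
    }

    fn main() {
        let data = vec![vec![0u8; 1024]; 4];
        assert_eq!(run_transfer(data.into_iter(), Duration::from_secs(120)), Ok(4096));
    }
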
@@ -105,7 +106,23 @@ fn init(our: Address) {
                     start.elapsed().as_millis()
                 ),
             ),
-            Err(e) => print_to_terminal(1, &format!("ft_worker: receive error: {}", e)),
+            Err(e) => {
+                print_to_terminal(1, &format!("ft_worker: receive error: {}", e));
+                // bubble up to parent.
+                // TODO: doublecheck this.
+                // if this fires on a basic timeout, that's bad.
+                Request::new()
+                    .body(DownloadRequests::DownloadComplete(
+                        DownloadCompleteRequest {
+                            package_id: package_id.clone().into(),
+                            version_hash: desired_version_hash.to_string(),
+                            err: Some(DownloadError::HandlingError(e.to_string())),
+                        },
+                    ))
+                    .target(parent_process)
+                    .send()
+                    .unwrap();
+            }
         }
     }
     DownloadRequests::RemoteDownload(remote_request) => {
@@ -187,6 +204,17 @@ fn handle_receiver(
     loop {
         let message = await_message()?;
         if *message.source() == timer_address {
+            // send error message to downloads process
+            Request::new()
+                .body(DownloadRequests::DownloadComplete(
+                    DownloadCompleteRequest {
+                        package_id: package_id.clone().into(),
+                        version_hash: version_hash.to_string(),
+                        err: Some(DownloadError::Timeout),
+                    },
+                ))
+                .target(parent_process.clone())
+                .send()?;
             return Ok(());
         }
         if !message.is_request() {
@@ -8,7 +8,7 @@ simulation-mode = []
 
 [dependencies]
 anyhow = "1.0"
-kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
+kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
 process_macros = "0.1"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
@@ -39,6 +39,7 @@
         "eth:distro:sys",
         "http-server:distro:sys",
         "http-client:distro:sys",
+        "sqlite:distro:sys",
         {
             "process": "vfs:distro:sys",
             "params": {
@@ -52,6 +53,7 @@
         "vfs:distro:sys",
         "http-client:distro:sys",
         "eth:distro:sys",
+        "sqlite:distro:sys",
         "timer:distro:sys"
     ],
     "public": false
@@ -1,11 +1,16 @@
 import React from 'react';
-import { Link } from 'react-router-dom';
+import { Link, useLocation } from 'react-router-dom';
 import { STORE_PATH, PUBLISH_PATH, MY_APPS_PATH } from '../constants/path';
 import { ConnectButton } from '@rainbow-me/rainbowkit';
 import { FaHome } from "react-icons/fa";
 import NotificationBay from './NotificationBay';
+import useAppsStore from '../store';
 
 const Header: React.FC = () => {
+    const location = useLocation();
+    const { updates } = useAppsStore();
+    const updateCount = Object.keys(updates || {}).length;
+
     return (
         <header className="app-header">
             <div className="header-left">
@@ -15,7 +20,10 @@ const Header: React.FC = () => {
                 </button>
                 <Link to={STORE_PATH} className={location.pathname === STORE_PATH ? 'active' : ''}>Apps</Link>
                 <Link to={PUBLISH_PATH} className={location.pathname === PUBLISH_PATH ? 'active' : ''}>Publish</Link>
-                <Link to={MY_APPS_PATH} className={location.pathname === MY_APPS_PATH ? 'active' : ''}>My Apps</Link>
+                <Link to={MY_APPS_PATH} className={location.pathname === MY_APPS_PATH ? 'active' : ''}>
+                    My Apps
+                    {updateCount > 0 && <span className="update-badge">{updateCount}</span>}
+                </Link>
             </nav>
         </div>
         <div className="header-right">
@@ -25,4 +33,5 @@ const Header: React.FC = () => {
     </header>
     );
 };
 
 export default Header;
kinode/packages/app-store/ui/src/components/ResetButton.tsx (new file, 69 lines)
@@ -0,0 +1,69 @@
+import React, { useState } from 'react';
+import { FaExclamationTriangle } from 'react-icons/fa';
+import useAppsStore from '../store';
+
+const ResetButton: React.FC = () => {
+    const resetStore = useAppsStore(state => state.resetStore);
+    const [isOpen, setIsOpen] = useState(false);
+    const [isLoading, setIsLoading] = useState(false);
+
+    const handleReset = async () => {
+        try {
+            setIsLoading(true);
+            await resetStore();
+            setIsOpen(false);
+        } catch (error) {
+            console.error('Reset failed:', error);
+            alert('Failed to reset the app store. Please try again.');
+        } finally {
+            setIsLoading(false);
+        }
+    };
+
+    return (
+        <>
+            <button
+                onClick={() => setIsOpen(true)}
+                className="button danger"
+                style={{ fontSize: '0.9rem' }}
+            >
+                Reset Store
+            </button>
+
+            {isOpen && (
+                <div className="modal-overlay" onClick={() => setIsOpen(false)}>
+                    <div className="modal-content" onClick={e => e.stopPropagation()}>
+                        <button className="modal-close" onClick={() => setIsOpen(false)}>×</button>
+                        <div style={{ display: 'flex', alignItems: 'center', gap: '0.75rem', marginBottom: '1rem' }}>
+                            <FaExclamationTriangle size={24} style={{ color: 'var(--red)' }} />
+                            <h3 style={{ margin: 0 }}>Warning</h3>
+                        </div>
+
+                        <p style={{ marginBottom: '1.5rem' }}>
+                            This action will re-index all apps and reset the store state.
+                            Only proceed if you know what you're doing.
+                        </p>
+
+                        <div style={{ display: 'flex', justifyContent: 'flex-end', gap: '0.75rem' }}>
+                            <button
+                                onClick={() => setIsOpen(false)}
+                                className="button"
+                            >
+                                Cancel
+                            </button>
+                            <button
+                                onClick={handleReset}
+                                disabled={isLoading}
+                                className="button danger"
+                            >
+                                {isLoading ? 'Resetting...' : 'Reset Store'}
+                            </button>
+                        </div>
+                    </div>
+                </div>
+            )}
+        </>
+    );
+};
+
+export default ResetButton;
kinode/packages/app-store/ui/src/components/Tooltip.tsx (new file, 16 lines)
@@ -0,0 +1,16 @@
+import React from 'react';
+
+interface TooltipProps {
+    content: React.ReactNode;
+    children?: React.ReactNode;
+}
+
+export function Tooltip({ content, children }: TooltipProps) {
+    return (
+        <div className="tooltip-container">
+            {children}
+            <span className="tooltip-icon">ⓘ</span>
+            <div className="tooltip-content">{content}</div>
+        </div>
+    );
+}
@@ -2,4 +2,5 @@ export { default as Header } from './Header';
 export { default as MirrorSelector } from './MirrorSelector';
 export { default as PackageSelector } from './PackageSelector';
 export { default as ManifestDisplay } from './ManifestDisplay';
 export { default as NotificationBay } from './NotificationBay';
+export { default as ResetButton } from './ResetButton';
@ -1,9 +1,37 @@
|
|||||||
|
:root {
|
||||||
|
/* Core colors */
|
||||||
|
--orange: #ff7e33;
|
||||||
|
--dark-orange: #e56a24;
|
||||||
|
--orange-hover: #ff9900;
|
||||||
|
--red: #e53e3e;
|
||||||
|
--blue: #4299e1;
|
||||||
|
--green: #48bb78;
|
||||||
|
--gray: #718096;
|
||||||
|
|
||||||
|
/* Sophisticated neutrals */
|
||||||
|
--bg-light: #fdf6e3;
|
||||||
|
/* Solarized inspired beige */
|
||||||
|
--bg-dark: #1f1d24;
|
||||||
|
/* Deep slate with hint of purple */
|
||||||
|
--surface-light: #f5efd9;
|
||||||
|
/* Slightly deeper complementary beige */
|
||||||
|
--surface-dark: #2a2832;
|
||||||
|
/* Rich eggplant-tinged dark */
|
||||||
|
--text-light: #2d2a2e;
|
||||||
|
/* Warm charcoal */
|
||||||
|
--text-dark: #e8e6f0;
|
||||||
|
/* Cool moonlight white */
|
||||||
|
|
||||||
|
/* Border radius */
|
||||||
|
--border-radius: 8px;
|
||||||
|
}
|
||||||
|
|
||||||
/* Base styles */
|
/* Base styles */
|
||||||
body {
|
body {
|
||||||
font-family: var(--font-family-main);
|
font-family: var(--font-family-main);
|
||||||
line-height: 1.6;
|
line-height: 1.6;
|
||||||
color: light-dark(var(--off-black), var(--off-white));
|
color: light-dark(var(--text-light), var(--text-dark));
|
||||||
background-color: light-dark(var(--tan), var(--tasteful-dark));
|
background-color: light-dark(var(--bg-light), var(--bg-dark));
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Layout */
|
/* Layout */
|
||||||
@ -35,7 +63,7 @@ a:hover {
|
|||||||
|
|
||||||
/* Header */
|
/* Header */
|
||||||
.app-header {
|
.app-header {
|
||||||
background-color: light-dark(var(--off-white), var(--off-black));
|
background-color: light-dark(var(--surface-light), var(--surface-dark));
|
||||||
padding: 1rem;
|
padding: 1rem;
|
||||||
margin-bottom: 1rem;
|
margin-bottom: 1rem;
|
||||||
display: flex;
|
display: flex;
|
||||||
@ -71,12 +99,15 @@ a:hover {
|
|||||||
text-decoration: none;
|
text-decoration: none;
|
||||||
padding: 0.5rem;
|
padding: 0.5rem;
|
||||||
border-radius: var(--border-radius);
|
border-radius: var(--border-radius);
|
||||||
|
position: relative;
|
||||||
|
display: inline-flex;
|
||||||
|
align-items: center;
|
||||||
}
|
}
|
||||||
|
|
||||||
.header-left nav a:hover,
|
.header-left nav a:hover,
|
||||||
.header-left nav a.active {
|
.header-left nav a.active {
|
||||||
background-color: var(--orange);
|
background-color: var(--orange);
|
||||||
color: var(--white);
|
color: var(--text-light);
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Forms */
|
/* Forms */
|
||||||
@ -91,6 +122,9 @@ form {
|
|||||||
display: flex;
|
display: flex;
|
||||||
flex-direction: column;
|
flex-direction: column;
|
||||||
margin-bottom: 1rem;
|
margin-bottom: 1rem;
|
||||||
|
background: light-dark(var(--surface-light), var(--surface-dark));
|
||||||
|
padding: 0.75rem;
|
||||||
|
border-radius: var(--border-radius);
|
||||||
}
|
}
|
||||||
|
|
||||||
label {
|
label {
|
||||||
@ -102,15 +136,21 @@ select {
|
|||||||
padding: 0.5rem;
|
padding: 0.5rem;
|
||||||
border: 1px solid var(--gray);
|
border: 1px solid var(--gray);
|
||||||
border-radius: var(--border-radius);
|
border-radius: var(--border-radius);
|
||||||
background-color: light-dark(var(--white), var(--tasteful-dark));
|
background-color: light-dark(var(--surface-light), var(--surface-dark));
|
||||||
color: light-dark(var(--off-black), var(--off-white));
|
color: light-dark(var(--text-light), var(--text-dark));
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Buttons */
|
/* Buttons */
|
||||||
button {
|
button {
|
||||||
|
display: inline-flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
gap: 0.5rem;
|
||||||
|
height: 40px;
|
||||||
|
font-weight: 500;
|
||||||
padding: 0.5rem 1rem;
|
padding: 0.5rem 1rem;
|
||||||
background-color: var(--orange);
|
background-color: var(--orange);
|
||||||
color: var(--white);
|
color: var(--text-light);
|
||||||
border: none;
|
border: none;
|
||||||
border-radius: var(--border-radius);
|
border-radius: var(--border-radius);
|
||||||
cursor: pointer;
|
cursor: pointer;
|
||||||
@ -125,6 +165,36 @@ button:disabled {
|
|||||||
cursor: not-allowed;
|
cursor: not-allowed;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
button.danger {
|
||||||
|
background-color: var(--red);
|
||||||
|
}
|
||||||
|
|
||||||
|
button.danger:hover {
|
||||||
|
background-color: color-mix(in srgb, var(--red) 85%, black);
|
||||||
|
}
|
||||||
|
|
||||||
|
/*Download Button */
|
||||||
|
.download-btn {
|
||||||
|
background: var(--orange);
|
||||||
|
color: var(--text-light);
|
||||||
|
border: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.download-btn:hover {
|
||||||
|
background: var(--dark-orange);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Notification Button */
|
||||||
|
/* .notification-btn {
|
||||||
|
background: var(--surface-dark);
|
||||||
|
color: var(--text);
|
||||||
|
border: 1px solid var(--gray);
|
||||||
|
}
|
||||||
|
|
||||||
|
.notification-btn:hover {
|
||||||
|
background: var(--surface-hover);
|
||||||
|
} */
|
||||||
|
|
||||||
/* Tables */
|
/* Tables */
|
||||||
table {
|
table {
|
||||||
width: 100%;
|
width: 100%;
|
||||||
@ -151,6 +221,9 @@ td {
|
|||||||
|
|
||||||
/* Messages */
|
/* Messages */
|
||||||
.message {
|
.message {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
font-weight: 500;
|
||||||
padding: 1rem;
|
padding: 1rem;
|
||||||
border-radius: var(--border-radius);
|
border-radius: var(--border-radius);
|
||||||
margin-bottom: 1rem;
|
margin-bottom: 1rem;
|
||||||
@ -158,17 +231,18 @@ td {
|
|||||||
|
|
||||||
.message.error {
|
.message.error {
|
||||||
background-color: var(--red);
|
background-color: var(--red);
|
||||||
color: var(--white);
|
color: var(--text-light);
|
||||||
}
|
}
|
||||||
|
|
||||||
.message.success {
|
.message.success {
|
||||||
background-color: var(--green);
|
background: light-dark(var(--surface-light), var(--surface-dark));
|
||||||
color: var(--white);
|
color: light-dark(var(--text-light), var(--text-dark));
|
||||||
|
border: 1px solid var(--green);
|
||||||
}
|
}
|
||||||
|
|
||||||
.message.info {
|
.message.info {
|
||||||
background-color: var(--blue);
|
background-color: var(--blue);
|
||||||
color: var(--white);
|
color: var(--text-light);
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Publisher Info */
|
/* Publisher Info */
|
||||||
@ -242,17 +316,24 @@ td {
|
|||||||
align-items: center;
|
align-items: center;
|
||||||
gap: 0.5rem;
|
gap: 0.5rem;
|
||||||
color: var(--red);
|
color: var(--red);
|
||||||
|
margin-top: 0.5rem;
|
||||||
font-size: 0.9rem;
|
font-size: 0.9rem;
|
||||||
margin-top: 0.25rem;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/* App Page and Download Page shared styles */
|
/* Shared page styles */
|
||||||
|
.store-page,
|
||||||
.app-page,
|
.app-page,
|
||||||
.downloads-page {
|
.my-apps-page,
|
||||||
background-color: light-dark(var(--white), var(--maroon));
|
.downloads-page,
|
||||||
|
.publish-page {
|
||||||
|
padding: 1rem;
|
||||||
|
background: light-dark(var(--bg-light), var(--bg-dark));
|
||||||
|
margin: 0 1vw;
|
||||||
border-radius: var(--border-radius);
|
border-radius: var(--border-radius);
|
||||||
padding: 2rem;
|
}
|
||||||
width: 100%;
|
|
||||||
|
.app-info {
|
||||||
|
max-width: 20rem;
|
||||||
}
|
}
|
||||||
|
|
||||||
.app-header {
|
.app-header {
|
||||||
@@ -268,12 +349,26 @@ td {
}

.app-info {
-background-color: light-dark(var(--tan), var(--tasteful-dark));
+background: light-dark(var(--surface-light), var(--surface-dark));
border-radius: var(--border-radius);
padding: 1.5rem;
margin-bottom: 2rem;
}

+/* Components with secondary backgrounds */
+.app-header,
+.app-info,
+.app-description,
+.form-group,
+.search-bar input,
+.version-selector,
+.mirror-selector select,
+.secondary,
+.message.success {
+background: light-dark(var(--surface-light), var(--surface-dark)) !important;
+color: light-dark(var(--text-light), var(--text-dark));
+}

/* Download Page specific styles */
.download-section {
display: flex;
@@ -289,8 +384,8 @@ td {
padding: 0.5em;
border: 1px solid var(--gray);
border-radius: var(--border-radius);
-background-color: light-dark(var(--white), var(--tasteful-dark));
+background-color: light-dark(var(--surface-light), var(--surface-dark));
-color: light-dark(var(--off-black), var(--off-white));
+color: light-dark(var(--text-light), var(--text-dark));
}

/* Action Buttons */
@@ -311,23 +406,23 @@ td {

.primary {
background-color: var(--orange);
-color: var(--white);
+color: var(--text-light);
}

.primary:hover:not(:disabled) {
background-color: var(--dark-orange);
-color: var(--white);
+color: var(--text-light);
}

.secondary {
-background-color: light-dark(var(--off-white), var(--off-black));
+background-color: light-dark(var(--surface-light), var(--surface-dark));
color: var(--orange);
border: 2px solid var(--orange);
}

.secondary:hover:not(:disabled) {
background-color: var(--orange);
-color: var(--white);
+color: var(--text-light);
}

.action-button:disabled,
@@ -337,6 +432,21 @@ td {
cursor: not-allowed;
}

+.action-button.download-button {
+background: var(--orange);
+color: var(--text-light);
+border: none;
+}

+.action-button.download-button:hover:not(:disabled) {
+background: var(--dark-orange);
+}

+.action-button.download-button:disabled {
+opacity: 0.5;
+cursor: not-allowed;
+}

/* App actions */
.app-actions {
display: flex;
@@ -385,8 +495,8 @@ td {
}

.cap-approval-content {
-background-color: light-dark(var(--white), var(--tasteful-dark));
+background-color: light-dark(var(--surface-light), var(--surface-dark));
-color: light-dark(var(--off-black), var(--off-white));
+color: light-dark(var(--text-light), var(--text-dark));
padding: 2rem;
border-radius: 8px;
max-width: 80%;
@@ -395,8 +505,8 @@ td {
}

.json-display {
-background-color: light-dark(var(--tan), var(--off-black));
+background-color: light-dark(var(--surface-light), var(--surface-dark));
-color: light-dark(var(--off-black), var(--off-white));
+color: light-dark(var(--text-light), var(--text-dark));
padding: 1rem;
border-radius: 4px;
white-space: pre-wrap;
@@ -410,6 +520,44 @@ td {
margin-top: 1rem;
}

+/* Search bar */
+.search-bar {
+width: 100%;
+margin: 1rem auto 2rem;
+position: relative;
+}

+.search-bar input {
+width: 100%;
+padding: 0.75rem 1rem 0.75rem 2.5rem;
+border: 2px solid transparent;
+border-radius: 2rem;
+background: light-dark(var(--surface-light), var(--surface-dark));
+color: light-dark(var(--text-light), var(--text-dark));
+font-size: 1rem;
+transition: all 0.2s ease;
+box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
+}

+.search-bar input:focus {
+outline: none;
+border-color: var(--orange);
+box-shadow: 0 4px 8px rgba(0, 0, 0, 0.1);
+}

+.search-bar svg {
+position: absolute;
+left: 0.75rem;
+top: 50%;
+transform: translateY(-50%);
+color: var(--gray);
+pointer-events: none;
+}

+.search-bar input::placeholder {
+color: var(--gray);
+}

/* Responsive adjustments */
@media (max-width: 48em) {

@@ -442,7 +590,7 @@ td {
}

.manifest-display {
-background: light-dark(var(--white), var(--tasteful-dark));
+background: light-dark(var(--surface-light), var(--surface-dark));
border-radius: var(--border-radius);
padding: 1rem;
max-width: 600px;
@@ -450,7 +598,7 @@ td {

.process-manifest {
margin-bottom: 0.5rem;
-border: 1px solid light-dark(var(--gray), var(--off-black));
+border: 1px solid light-dark(var(--gray), var(--surface-dark));
border-radius: var(--border-radius);
overflow: hidden;
}
@@ -464,12 +612,12 @@ td {
background: none;
border: none;
cursor: pointer;
-color: light-dark(var(--off-black), var(--off-white));
+color: light-dark(var(--text-light), var(--text-dark));
transition: background-color 0.2s;
}

.process-header:hover {
-background: light-dark(var(--tan), var(--off-black));
+background-color: light-dark(var(--surface-light), var(--surface-dark));
}

.process-name {
@@ -481,7 +629,7 @@ td {
.process-indicators {
display: flex;
gap: 0.5rem;
-color: light-dark(var(--gray), var(--off-white));
+color: light-dark(var(--gray), var(--text-dark));
}

.network-icon {
@@ -498,8 +646,8 @@ td {

.process-details {
padding: 1rem;
-background: light-dark(var(--tan), var(--off-black));
+background: light-dark(var(--surface-light), var(--surface-dark));
-border-top: 1px solid light-dark(var(--gray), var(--off-black));
+border-top: 1px solid light-dark(var(--gray), var(--surface-dark));
}

.capability-section {
@@ -512,13 +660,13 @@ td {

.capability-section h4 {
margin: 0 0 0.5rem 0;
-color: light-dark(var(--off-black), var(--off-white));
+color: light-dark(var(--text-light), var(--text-dark));
}

.capability-section ul {
margin: 0;
padding-left: 1.5rem;
-color: light-dark(var(--gray), var(--off-white));
+color: light-dark(var(--gray), var(--text-dark));
}

.capability-section li {
@@ -538,7 +686,7 @@ td {
align-items: center;
gap: 0.5rem;
padding: 0.5rem;
-color: light-dark(var(--off-black), var(--off-white));
+color: light-dark(var(--text-light), var(--text-dark));
}

.notification-details {
@@ -548,7 +696,7 @@ td {
width: 320px;
max-height: 400px;
overflow-y: auto;
-background-color: light-dark(var(--white), var(--tasteful-dark));
+background-color: light-dark(var(--surface-light), var(--surface-dark));
border-radius: var(--border-radius);
box-shadow: 0 2px 10px rgba(0, 0, 0, 0.1);
z-index: 1000;
@@ -557,7 +705,7 @@ td {

.badge {
background-color: var(--orange);
-color: var(--white);
+color: var(--text-light);
border-radius: 50%;
padding: 0.25rem 0.5rem;
font-size: 0.75rem;
@@ -571,8 +719,8 @@ td {
padding: 1rem;
margin: 0.5rem 0;
border-radius: var(--border-radius);
-background-color: light-dark(var(--tan), var(--off-black));
+background-color: light-dark(var(--surface-light), var(--surface-dark));
-color: light-dark(var(--off-black), var(--off-white));
+color: light-dark(var(--text-light), var(--text-dark));
}

.notification-item.error {
@@ -606,7 +754,7 @@ td {
background: none;
border: none;
cursor: pointer;
-color: light-dark(var(--gray), var(--off-white));
+color: light-dark(var(--gray), var(--text-dark));
padding: 0.25rem;
}

@@ -617,7 +765,7 @@ td {
.progress-bar {
margin-top: 0.5rem;
height: 4px;
-background-color: light-dark(var(--white), var(--off-black));
+background-color: light-dark(var(--surface-light), var(--surface-dark));
border-radius: 2px;
overflow: hidden;
}
@@ -643,8 +791,8 @@ td {
}

.modal-content {
-background-color: light-dark(var(--white), var(--tasteful-dark));
+background-color: light-dark(var(--surface-light), var(--surface-dark));
-color: light-dark(var(--off-black), var(--off-white));
+color: light-dark(var(--text-light), var(--text-dark));
padding: 1.5rem;
border-radius: var(--border-radius);
position: relative;
@@ -660,7 +808,7 @@ td {
background: none;
border: none;
cursor: pointer;
-color: light-dark(var(--gray), var(--off-white));
+color: light-dark(var(--gray), var(--text-dark));
padding: 0.25rem;
}

@@ -693,7 +841,7 @@ td {
50%,
70% {
transform: translate3d(-4px, 0, 0);
}

40%,
60% {
@@ -708,9 +856,511 @@ td {

50% {
opacity: 0.6;
}

100% {
opacity: 1;
}
+}

+/* Loading Spinner */
+.loading-spinner {
+display: inline-block;
+width: 20px;
+height: 20px;
+margin-right: 8px;
+border: 2px solid var(--text-light);
+border-radius: 50%;
+border-top-color: transparent;
+animation: spin 1s linear infinite;
+}

+.loading-spinner.small {
+width: 14px;
+height: 14px;
+margin-right: 6px;
+border-width: 1.5px;
+}

+@keyframes spin {
+to {
+transform: rotate(360deg);
+}
+}

+/* Publish Page */
+.publish-page {
+padding: 1rem;
+}

+.publish-page h1 {
+margin-bottom: 2rem;
+}

+.connect-wallet {
+text-align: center;
+padding: 2rem;
+background: light-dark(var(--surface-light), var(--surface-dark));
+border-radius: var(--border-radius);
+margin-bottom: 2rem;
+}

+.publish-form {
+background: light-dark(var(--surface-light), var(--surface-dark));
+padding: 2rem;
+border-radius: var(--border-radius);
+margin-bottom: 2rem;
+}

+.package-list {
+list-style: none;
+padding: 0;
+margin: 0;
+display: grid;
+gap: 1rem;
+}

+.package-list li {
+display: flex;
+align-items: center;
+justify-content: space-between;
+padding: 1rem;
+background: light-dark(var(--surface-light), var(--surface-dark));
+border-radius: var(--border-radius);
+}

+.package-list .app-name {
+display: flex;
+align-items: center;
+gap: 1rem;
+color: inherit;
+text-decoration: none;
+}

+.package-list .app-name:hover {
+color: var(--orange);
+}

+.package-icon {
+width: 32px;
+height: 32px;
+border-radius: var(--border-radius);
+}

+.no-packages {
+text-align: center;
+padding: 2rem;
+background: light-dark(var(--surface-light), var(--surface-dark));
+border-radius: var(--border-radius);
+color: var(--gray);
+}

+/* Update badge */
+.update-badge {
+background: var(--red);
+color: var(--text-light);
+border-radius: 50%;
+padding: 0.15rem 0.4rem;
+font-size: 0.75rem;
+position: absolute;
+top: -5px;
+right: -5px;
+min-width: 18px;
+height: 18px;
+display: flex;
+align-items: center;
+justify-content: center;
+font-weight: 600;
+}

+/* Updates section */
+.updates-section {
+margin-bottom: 2rem;
+}

+.section-title {
+color: var(--orange);
+font-size: 1.25rem;
+margin-bottom: 1rem;
+}

+.updates-list {
+display: flex;
+flex-direction: column;
+gap: 0.5rem;
+}

+.update-item {
+background-color: light-dark(var(--surface-light), var(--surface-dark));
+border-radius: var(--border-radius);
+overflow: hidden;
+border: 1px solid transparent;
+}

+.update-header {
+display: flex;
+justify-content: space-between;
+align-items: center;
+padding: 0.75rem 1rem;
+cursor: pointer;
+transition: background-color 0.2s;
+}

+.update-header:hover {
+background-color: rgba(255, 255, 255, 0.05);
+}

+.update-title {
+display: flex;
+align-items: center;
+gap: 0.75rem;
+font-weight: 500;
+}

+.update-actions {
+display: flex;
+gap: 0.5rem;
+}

+.update-actions .action-button {
+background: none;
+border: none;
+cursor: pointer;
+color: var(--gray);
+transition: color 0.2s;
+display: flex;
+align-items: center;
+}

+.update-actions .action-button.retry:hover {
+color: var(--blue);
+}

+.update-actions .action-button.clear:hover {
+color: var(--red);
+}

+.update-details {
+padding: 0.75rem 1rem 1rem 2.25rem;
+border-top: 1px solid rgba(255, 255, 255, 0.1);
+}

+.version-info {
+color: var(--gray);
+font-size: 0.9rem;
+margin-bottom: 0.5rem;
+}

+.manifest-info {
+color: var(--orange);
+font-size: 0.9rem;
+margin-bottom: 0.5rem;
+}

+.error-list {
+display: flex;
+flex-direction: column;
+gap: 0.5rem;
+}

+.error-item {
+display: flex;
+align-items: center;
+gap: 0.5rem;
+color: var(--red);
+font-size: 0.9rem;
+}

+.error-icon {
+flex-shrink: 0;
+}

+/* App Page Layout */
+.app-page {
+max-width: 80rem;
+margin: 0 auto;
+padding: 2rem 1rem;
+}

+/* Updates Section */
+.updates-section {
+margin-bottom: 8;
+}

+.update-item {
+border: 1px solid transparent;
+border-radius: 0.5rem;
+padding: 1rem;
+margin-bottom: 1rem;
+background-color: light-dark(var(--surface-light), var(--surface-dark));
+border: 1px solid light-dark(var(--gray), var(--surface-dark));
+}

+.update-header {
+display: flex;
+align-items: center;
+justify-content: space-between;
+cursor: pointer;
+}

+.update-summary {
+display: flex;
+align-items: center;
+gap: 0.5rem;
+}

+.update-details {
+margin-top: 1rem;
+color: light-dark(var(--text-secondary), var(--text));
+}

+.retry-button {
+display: flex;
+align-items: center;
+gap: 0.5rem;
+padding: 0.75rem 1rem;
+border-radius: 0.375rem;
+font-size: 1rem;
+background-color: light-dark(var(--surface-light), var(--surface-dark));
+color: var(--orange);
+border: 1px solid var(--orange);
+transition: background-color 0.2s, color 0.2s;
+}

+.error-count {
+padding: 0.25rem 0.5rem;
+border-radius: 0.25rem;
+font-size: 0.75rem;
+background-color: light-dark(var(--red-100), var(--red-900));
+color: light-dark(var(--red-700), var(--red-200));
+}

+/* Navigation */
+.navigation {
+display: flex;
+align-items: center;
+gap: 4;
+margin-bottom: 1.5rem;
+}

+.nav-button {
+display: flex;
+align-items: center;
+gap: 2;
+padding: 0.75rem 1rem;
+border-radius: 0.375rem;
+font-size: 1rem;
+background-color: light-dark(var(--surface-light), var(--surface-dark));
+color: var(--orange);
+border: 1px solid var(--orange);
+transition: background-color 0.2s, color 0.2s;
+}

+.current-path {
+font-size: 1rem;
+color: light-dark(var(--text-secondary), var(--text));
+}

+.file-explorer {
+border: 1px solid light-dark(var(--gray), var(--surface-dark));
+padding: 1rem;
+border-radius: var(--border-radius);
+background: light-dark(var(--surface-light), var(--surface-dark));
+}

+.file-explorer h3 {
+padding: 0.75rem 1rem;
+font-size: 1.125rem;
+font-weight: 500;
+background-color: light-dark(var(--surface-light), var(--surface-dark));
+border-bottom: 1px solid light-dark(var(--gray), var(--surface-dark));
+}

+.downloads-table {
+width: 100%;
+border-radius: var(--border-radius);
+overflow: hidden;
+}

+.downloads-table th {
+padding: 0.75rem 1rem;
+font-size: 1rem;
+font-weight: 500;
+text-align: left;
+color: light-dark(var(--text-secondary), var(--text));
+background-color: light-dark(var(--surface-light), var(--surface-dark));
+border-bottom: 1px solid light-dark(var(--gray), var(--surface-dark));
+}

+.downloads-table td {
+padding: 0.75rem 1rem;
+font-size: 1rem;
+border-bottom: 1px solid light-dark(var(--gray), var(--surface-dark));
+}

+.downloads-table tr.file:hover,
+.downloads-table tr.directory:hover {
+background-color: light-dark(var(--surface-light), var(--surface-dark));
+cursor: pointer;
+}

+.updates-section {
+background: light-dark(var(--surface-light), var(--surface-dark));
+padding: 1rem;
+margin-bottom: 1rem;
+border-radius: var(--border-radius);
+}

+.tooltip-container {
+position: relative;
+display: inline-flex;
+align-items: center;
+gap: 4px;
+}

+.tooltip-icon {
+cursor: help;
+color: #666;
+font-size: 14px;
+position: relative;
+}

+.tooltip-content {
+position: absolute;
+left: 24px;
+top: -4px;
+background: #333;
+color: white;
+padding: 8px 12px;
+border-radius: 4px;
+font-size: 14px;
+white-space: nowrap;
+z-index: 1000;
+opacity: 0;
+visibility: hidden;
+transition: opacity 0.3s ease, visibility 0.3s ease;
+min-width: max-content;
+}

+/* Create an invisible bridge between icon and content */
+.tooltip-content::after {
+content: '';
+position: absolute;
+left: -20px; /* Cover the gap between icon and content */
+top: 0;
+width: 20px;
+height: 100%;
+background: transparent;
+}

+.tooltip-container:hover .tooltip-content {
+opacity: 1;
+visibility: visible;
+transition-delay: 0.2s;
+}

+.tooltip-content:hover {
+opacity: 1 !important;
+visibility: visible !important;
+}

+.tooltip-content::before {
+content: '';
+position: absolute;
+left: -4px;
+top: 8px;
+border-top: 4px solid transparent;
+border-bottom: 4px solid transparent;
+border-right: 4px solid #333;
+}

+.tooltip-content a {
+color: #fff;
+text-decoration: underline;
+}

+.tooltip-content a:hover {
+text-decoration: none;
+}

+.wallet-status {
+display: flex;
+align-items: center;
+gap: 4px;
+}

+.page-header {
+display: flex;
+justify-content: space-between;
+align-items: center;
+margin-bottom: 2rem;
+}

+.header-actions {
+display: flex;
+gap: 1rem;
+align-items: center;
+}

+/* Modal styles */
+.modal-overlay {
+position: fixed;
+inset: 0;
+background-color: rgba(0, 0, 0, 0.5);
+display: flex;
+align-items: center;
+justify-content: center;
+z-index: 50;
+}

+.modal-container {
+padding: 1rem;
+width: 100%;
+max-width: 28rem;
+}

+.modal-content {
+background-color: light-dark(var(--surface-light), var(--surface-dark));
+padding: 1.5rem;
+border-radius: var(--border-radius);
+}

+.modal-header {
+display: flex;
+align-items: center;
+gap: 0.75rem;
+margin-bottom: 1rem;
+}

+.modal-title {
+font-size: 1.25rem;
+font-weight: 500;
+margin: 0;
+color: inherit;
+}

+.modal-description {
+color: light-dark(var(--text-light), var(--text-dark));
+margin-bottom: 1.5rem;
+}

+.modal-footer {
+display: flex;
+justify-content: flex-end;
+gap: 0.75rem;
+}

+/* Button variants */
+.danger-button {
+background-color: var(--red) !important;
+color: white !important;
+}

+.danger-button:hover {
+opacity: 0.9;
+}

+.danger-button:disabled {
+opacity: 0.5;
+cursor: not-allowed;
}
@@ -148,6 +148,12 @@ export default function AppPage() {
{latestVersion && (
<li><span>Latest Version:</span> <span>{latestVersion}</span></li>
)}
+{installedApp?.pending_update_hash && (
+<li className="warning">
+<span>Failed Auto-Update:</span>
+<span>Update to version with hash {installedApp.pending_update_hash.slice(0, 8)}... failed, approve newly requested capabilities and install it here:</span>
+</li>
+)}
<li><span>Publisher:</span> <span>{app.package_id.publisher_node}</span></li>
<li><span>License:</span> <span>{app.metadata?.properties?.license || "Not specified"}</span></li>
<li>
@@ -1,7 +1,9 @@
import React, { useState, useEffect } from "react";
-import { FaFolder, FaFile, FaChevronLeft, FaSync, FaRocket, FaSpinner, FaCheck, FaTrash } from "react-icons/fa";
+import { FaFolder, FaFile, FaChevronLeft, FaSync, FaRocket, FaSpinner, FaCheck, FaTrash, FaExclamationTriangle, FaTimesCircle, FaChevronDown, FaChevronRight } from "react-icons/fa";
+import { useNavigate } from "react-router-dom";
import useAppsStore from "../store";
-import { DownloadItem, PackageManifest, PackageState } from "../types/Apps";
+import { ResetButton} from "../components";
+import { DownloadItem, PackageManifestEntry, PackageState, Updates, DownloadError, UpdateInfo } from "../types/Apps";

// Core packages that cannot be uninstalled
const CORE_PACKAGES = [
@@ -16,6 +18,7 @@ const CORE_PACKAGES = [
];

export default function MyAppsPage() {
+const navigate = useNavigate();
const {
fetchDownloads,
fetchDownloadsForApp,
@@ -25,16 +28,20 @@ export default function MyAppsPage() {
removeDownload,
fetchInstalled,
installed,
-uninstallApp
+uninstallApp,
+fetchUpdates,
+clearUpdates,
+updates
} = useAppsStore();

const [currentPath, setCurrentPath] = useState<string[]>([]);
const [items, setItems] = useState<DownloadItem[]>([]);
+const [expandedUpdates, setExpandedUpdates] = useState<Set<string>>(new Set());
const [isInstalling, setIsInstalling] = useState(false);
const [isUninstalling, setIsUninstalling] = useState(false);
const [error, setError] = useState<string | null>(null);
const [showCapApproval, setShowCapApproval] = useState(false);
-const [manifest, setManifest] = useState<PackageManifest | null>(null);
+const [manifest, setManifest] = useState<PackageManifestEntry | null>(null);
const [selectedItem, setSelectedItem] = useState<DownloadItem | null>(null);
const [showUninstallConfirm, setShowUninstallConfirm] = useState(false);
const [appToUninstall, setAppToUninstall] = useState<any>(null);
@@ -42,6 +49,7 @@ export default function MyAppsPage() {
useEffect(() => {
loadItems();
fetchInstalled();
+fetchUpdates();
}, [currentPath]);

const loadItems = async () => {
@@ -59,34 +67,132 @@ export default function MyAppsPage() {
}
};

-const initiateUninstall = (app: any) => {
+const handleClearUpdates = async (packageId: string) => {
-const packageId = `${app.package_id.package_name}:${app.package_id.publisher_node}`;
+await clearUpdates(packageId);
-if (CORE_PACKAGES.includes(packageId)) {
+fetchUpdates(); // Refresh updates after clearing
-setError("Cannot uninstall core system packages");
-return;
-}
-setAppToUninstall(app);
-setShowUninstallConfirm(true);
};

-const handleUninstall = async () => {
+const toggleUpdateExpansion = (packageId: string) => {
-if (!appToUninstall) return;
+setExpandedUpdates(prev => {
-setIsUninstalling(true);
+const newSet = new Set(prev);
-const packageId = `${appToUninstall.package_id.package_name}:${appToUninstall.package_id.publisher_node}`;
+if (newSet.has(packageId)) {
-try {
+newSet.delete(packageId);
-await uninstallApp(packageId);
+} else {
-await fetchInstalled();
+newSet.add(packageId);
-await loadItems();
+}
-setShowUninstallConfirm(false);
+return newSet;
-setAppToUninstall(null);
+});
-} catch (error) {
-console.error('Uninstallation failed:', error);
-setError(`Uninstallation failed: ${error instanceof Error ? error.message : String(error)}`);
-} finally {
-setIsUninstalling(false);
-}
};

+const formatError = (error: DownloadError): string => {
+if (typeof error === 'string') {
+return error;
+} else if ('HashMismatch' in error) {
+return `Hash mismatch (expected ${error.HashMismatch.desired.slice(0, 8)}, got ${error.HashMismatch.actual.slice(0, 8)})`;
+} else if ('HandlingError' in error) {
+return error.HandlingError;
+} else if ('Timeout' in error) {
+return 'Connection timed out';
+}
+return 'Unknown error';
+};

+const renderUpdates = () => {
+if (!updates || Object.keys(updates).length === 0) {
+return (
+<div className="updates-section">
+<h2>Failed Auto Updates (0)</h2>
+<p>None found, all clear!</p>
+</div>
+);
+}

+return (
+<div className="updates-section">
+<h2 className="section-title">Failed Auto Updates ({Object.keys(updates).length})</h2>
+{Object.keys(updates).length > 0 ? (
+<div className="updates-list">
+{Object.entries(updates).map(([packageId, versionMap]) => {
+const totalErrors = Object.values(versionMap).reduce((sum, info) =>
+sum + (info.errors?.length || 0), 0);
+const hasManifestChanges = Object.values(versionMap).some(info =>
+info.pending_manifest_hash);

+return (
+<div key={packageId} className="update-item error">
+<div className="update-header" onClick={() => toggleUpdateExpansion(packageId)}>
+<div className="update-title">
+{expandedUpdates.has(packageId) ? <FaChevronDown /> : <FaChevronRight />}
+<FaExclamationTriangle className="error-badge" />
+<span>{packageId}</span>
+<div className="update-summary">
+{totalErrors > 0 && (
+<span className="error-count">{totalErrors} error{totalErrors !== 1 ? 's' : ''}</span>
+)}
+{hasManifestChanges && (
+<span className="manifest-badge">Manifest changes pending</span>
+)}
+</div>
+</div>
+<div className="update-actions">
+<button
+className="action-button retry"
+onClick={(e) => {
+e.stopPropagation();
+navigate(`/download/${packageId}`);
+}}
+title="Retry download"
+>
+<FaSync />
+<span>Retry</span>
+</button>
+<button
+className="action-button clear"
+onClick={(e) => {
+e.stopPropagation();
+handleClearUpdates(packageId);
+}}
+title="Clear update info"
+>
+<FaTimesCircle />
+</button>
+</div>
+</div>
+{expandedUpdates.has(packageId) && Object.entries(versionMap).map(([versionHash, info]) => (
+<div key={versionHash} className="update-details">
+<div className="version-info">
+Version: {versionHash.slice(0, 8)}...
+</div>
+{info.pending_manifest_hash && (
+<div className="manifest-info">
+<FaExclamationTriangle />
+Pending manifest: {info.pending_manifest_hash.slice(0, 8)}...
+</div>
+)}
+{info.errors && info.errors.length > 0 && (
+<div className="error-list">
+{info.errors.map(([source, error], idx) => (
+<div key={idx} className="error-item">
+<FaExclamationTriangle className="error-icon" />
+<span>{source}: {formatError(error)}</span>
+</div>
+))}
+</div>
+)}
+</div>
+))}
+</div>
+);
+})}
+</div>
+) : (
+<div className="empty-state">
+No failed auto updates found.
+</div>
+)}
+</div>
+);
+};

const navigateToItem = (item: DownloadItem) => {
if (item.Dir) {
@@ -173,113 +279,153 @@ export default function MyAppsPage() {
return Object.values(installed).some(app => app.package_id.package_name === packageName);
};

+const initiateUninstall = (app: any) => {
+const packageId = `${app.package_id.package_name}:${app.package_id.publisher_node}`;
+if (CORE_PACKAGES.includes(packageId)) {
+setError("Cannot uninstall core system packages");
+return;
+}
+setAppToUninstall(app);
+setShowUninstallConfirm(true);
+};

+const handleUninstall = async () => {
+if (!appToUninstall) return;
+setIsUninstalling(true);
+const packageId = `${appToUninstall.package_id.package_name}:${appToUninstall.package_id.publisher_node}`;
+try {
+await uninstallApp(packageId);
+await fetchInstalled();
+await loadItems();
+setShowUninstallConfirm(false);
+setAppToUninstall(null);
+} catch (error) {
+console.error('Uninstallation failed:', error);
+setError(`Uninstallation failed: ${error instanceof Error ? error.message : String(error)}`);
+} finally {
+setIsUninstalling(false);
+}
+};

return (
-<div className="downloads-page">
+<div className="my-apps-page">
-<h2>My Apps</h2>
+<div style={{ display: 'flex', justifyContent: 'space-between', alignItems: 'center', marginBottom: '2rem' }}>
+<h1>My Apps</h1>
+<ResetButton />
+</div>
+{error && <div className="error-message">{error}</div>}
+{renderUpdates()}

-{/* Installed Apps Section */}
+{/* Navigation */}
-<div className="file-explorer">
+<div className="navigation">
-<h3>Installed Apps</h3>
+{currentPath.length > 0 && (
-<table className="downloads-table">
+<button onClick={() => setCurrentPath([])} className="nav-button">
-<thead>
+<FaChevronLeft /> Back
-<tr>
+</button>
-<th>Package ID</th>
+)}
-<th>Actions</th>
+<div className="current-path">
-</tr>
+{currentPath.length === 0 ? 'Downloads' : currentPath.join('/')}
-</thead>
+</div>
-<tbody>
-{Object.values(installed).map((app) => {
-const packageId = `${app.package_id.package_name}:${app.package_id.publisher_node}`;
-const isCore = CORE_PACKAGES.includes(packageId);
-return (
-<tr key={packageId}>
-<td>{packageId}</td>
-<td>
-{isCore ? (
-<span className="core-package">Core Package</span>
-) : (
-<button
-onClick={() => initiateUninstall(app)}
-disabled={isUninstalling}
->
-{isUninstalling ? <FaSpinner className="fa-spin" /> : <FaTrash />}
-Uninstall
-</button>
-)}
-</td>
-</tr>
-);
-})}
-</tbody>
-</table>
</div>

-{/* Downloads Section */}
+{/* Items Table */}
-<div className="file-explorer">
+<div className="items-table-container">
-<h3>Downloads</h3>
+<div className="file-explorer">
-<div className="path-navigation">
+<h3>Installed Apps</h3>
-{currentPath.length > 0 && (
+<table className="downloads-table">
-<button onClick={navigateUp} className="navigate-up">
+<thead>
-<FaChevronLeft /> Back
+<tr>
-</button>
+<th>Package ID</th>
-)}
+<th>Actions</th>
-<span className="current-path">/{currentPath.join('/')}</span>
+</tr>
+</thead>
+<tbody>
+{Object.values(installed).map((app) => {
+const packageId = `${app.package_id.package_name}:${app.package_id.publisher_node}`;
+const isCore = CORE_PACKAGES.includes(packageId);
+return (
+<tr key={packageId}>
+<td>{packageId}</td>
+<td>
+{isCore ? (
+<span className="core-package">Core Package</span>
+) : (
+<button
+onClick={() => initiateUninstall(app)}
+disabled={isUninstalling}
+>
+{isUninstalling ? <FaSpinner className="fa-spin" /> : <FaTrash />}
+Uninstall
+</button>
+)}
+</td>
+</tr>
+);
+})}
+</tbody>
+</table>
</div>
-<table className="downloads-table">
-<thead>
-<tr>
-<th>Name</th>
-<th>Type</th>
-<th>Size</th>
-<th>Mirroring</th>
-<th>Actions</th>
-</tr>
-</thead>
-<tbody>
-{items.map((item, index) => {
-const isFile = !!item.File;
-const name = isFile ? item.File!.name : item.Dir!.name;
-const isInstalled = isFile && isAppInstalled(name);
-return (
-<tr key={index} onClick={() => navigateToItem(item)} className={isFile ? 'file' : 'directory'}>
-<td>
-{isFile ? <FaFile /> : <FaFolder />} {name}
-</td>
-<td>{isFile ? 'File' : 'Directory'}</td>
-<td>{isFile ? `${(item.File!.size / 1024).toFixed(2)} KB` : '-'}</td>
-<td>{!isFile && (item.Dir!.mirroring ? 'Yes' : 'No')}</td>
-<td>
-{!isFile && (
-<button onClick={(e) => { e.stopPropagation(); toggleMirroring(item); }}>
-<FaSync /> {item.Dir!.mirroring ? 'Stop' : 'Start'} Mirroring
-</button>
-)}
-{isFile && !isInstalled && (
-<>
-<button onClick={(e) => { e.stopPropagation(); handleInstall(item); }}>
-<FaRocket /> Install
-</button>
-<button onClick={(e) => { e.stopPropagation(); handleRemoveDownload(item); }}>
-<FaTrash /> Delete
-</button>
-</>
-)}
-{isFile && isInstalled && (
-<FaCheck className="installed" />
-)}
-</td>
-</tr>
-);
-})}
-</tbody>
-</table>
-</div>

-{error && (
+<div className="file-explorer">
-<div className="error-message">
+<h3>Downloads</h3>
-{error}
+<div className="path-navigation">
+{currentPath.length > 0 && (
+<button onClick={navigateUp} className="navigate-up">
+<FaChevronLeft /> Back
+</button>
+)}
+<span className="current-path">/{currentPath.join('/')}</span>
+</div>
+<table className="downloads-table">
+<thead>
+<tr>
+<th>Name</th>
+<th>Type</th>
+<th>Size</th>
+<th>Mirroring</th>
+<th>Actions</th>
+</tr>
+</thead>
+<tbody>
+{items.map((item, index) => {
+const isFile = !!item.File;
+const name = isFile ? item.File!.name : item.Dir!.name;
+const isInstalled = isFile && isAppInstalled(name);
+return (
+<tr key={index} onClick={() => navigateToItem(item)} className={isFile ? 'file' : 'directory'}>
+<td>
+{isFile ? <FaFile /> : <FaFolder />} {name}
+</td>
+<td>{isFile ? 'File' : 'Directory'}</td>
+<td>{isFile ? `${(item.File!.size / 1024).toFixed(2)} KB` : '-'}</td>
+<td>{!isFile && (item.Dir!.mirroring ? 'Yes' : 'No')}</td>
+<td>
+{!isFile && (
+<button onClick={(e) => { e.stopPropagation(); toggleMirroring(item); }}>
+<FaSync /> {item.Dir!.mirroring ? 'Stop' : 'Start'} Mirroring
+</button>
+)}
+{isFile && !isInstalled && (
+<>
+<button onClick={(e) => { e.stopPropagation(); handleInstall(item); }}>
+<FaRocket /> Install
+</button>
+<button onClick={(e) => { e.stopPropagation(); handleRemoveDownload(item); }}>
+<FaTrash /> Delete
+</button>
+</>
+)}
+{isFile && isInstalled && (
+<FaCheck className="installed" />
+)}
+</td>
+</tr>
+);
+})}
+</tbody>
+</table>
</div>
-)}
+</div>

{/* Uninstall Confirmation Modal */}
{showUninstallConfirm && appToUninstall && (
@@ -318,8 +464,6 @@ export default function MyAppsPage() {
</div>
)}

-
-
{showCapApproval && manifest && (
<div className="cap-approval-popup">
<div className="cap-approval-content">
@@ -7,12 +7,13 @@ import { mechAbi, KIMAP, encodeIntoMintCall, encodeMulticalls, kimapAbi, MULTICA
import { kinohash } from '../utils/kinohash';
import useAppsStore from "../store";
import { PackageSelector } from "../components";
+import { Tooltip } from '../components/Tooltip';

const NAME_INVALID = "Package name must contain only valid characters (a-z, 0-9, -, and .)";

export default function PublishPage() {
const { openConnectModal } = useConnectModal();
-const { ourApps, fetchOurApps, downloads } = useAppsStore();
+const { ourApps, fetchOurApps, downloads, fetchDownloadsForApp } = useAppsStore();
const publicClient = usePublicClient();

const { address, isConnected, isConnecting } = useAccount();
@@ -23,6 +24,7 @@ export default function PublishPage() {
});

const [packageName, setPackageName] = useState<string>("");
+// @ts-ignore
const [publisherId, setPublisherId] = useState<string>(window.our?.node || "");
const [metadataUrl, setMetadataUrl] = useState<string>("");
const [metadataHash, setMetadataHash] = useState<string>("");
@@ -34,6 +36,26 @@ export default function PublishPage() {
fetchOurApps();
}, [fetchOurApps]);

+useEffect(() => {
+if (packageName && publisherId) {
+const id = `${packageName}:${publisherId}`;
+fetchDownloadsForApp(id);
+}
+}, [packageName, publisherId, fetchDownloadsForApp]);

+useEffect(() => {
+if (isConfirmed) {
+// Fetch our apps again after successful publish
+fetchOurApps();
+// Reset form fields
+setPackageName("");
+// @ts-ignore
+setPublisherId(window.our?.node || "");
+setMetadataUrl("");
+setMetadataHash("");
+}
+}, [isConfirmed, fetchOurApps]);

const validatePackageName = useCallback((name: string) => {
// Allow lowercase letters, numbers, hyphens, and dots
const validNameRegex = /^[a-z0-9.-]+$/;
@@ -69,9 +91,12 @@ export default function PublishPage() {
// Check if code_hashes exist in metadata and is an object
if (metadata.properties && metadata.properties.code_hashes && typeof metadata.properties.code_hashes === 'object') {
const codeHashes = metadata.properties.code_hashes;
-const missingHashes = Object.entries(codeHashes).filter(([version, hash]) =>
+console.log('Available downloads:', downloads[`${packageName}:${publisherId}`]);
-!downloads[`${packageName}:${publisherId}`]?.some(d => d.File?.name === `${hash}.zip`)
-);
+const missingHashes = Object.entries(codeHashes).filter(([version, hash]) => {
+const hasDownload = downloads[`${packageName}:${publisherId}`]?.some(d => d.File?.name === `${hash}.zip`);
+return !hasDownload;
+});

if (missingHashes.length > 0) {
setMetadataError(`Missing local downloads for mirroring versions: ${missingHashes.map(([version]) => version).join(', ')}`);
@@ -163,12 +188,6 @@ export default function PublishPage() {
gas: BigInt(1000000),
});

-// Reset form fields
-setPackageName("");
-setPublisherId(window.our?.node || "");
-setMetadataUrl("");
-setMetadataHash("");

} catch (error) {
console.error(error);
}
@@ -223,22 +242,31 @@ export default function PublishPage() {
return (
<div className="publish-page">
<h1>Publish Package</h1>
-{Boolean(address) && (
+{!address ? (
-<div className="publisher-info">
+<div className="wallet-status">
-<span>Publishing as:</span>
+<button onClick={() => openConnectModal?.()}>Connect Wallet</button>
-<span className="address">{address?.slice(0, 4)}...{address?.slice(-4)}</span>
+</div>
+) : (
+<div className="wallet-status">
+Connected: {address.slice(0, 6)}...{address.slice(-4)}
+<Tooltip content="Make sure the wallet you're connecting to publish is the same as the owner for the publisher!" />
</div>
)}

{isConfirming ? (
-<div className="message info">Publishing package...</div>
+<div className="message info">
+<div className="loading-spinner"></div>
+<span>Publishing package...</span>
+</div>
) : !address || !isConnected ? (
-<>
+<div className="connect-wallet">
<h4>Please connect your wallet to publish a package</h4>
<ConnectButton />
-</>
+</div>
) : isConnecting ? (
-<div className="message info">Approve connection in your wallet</div>
+<div className="message info">
+<div className="loading-spinner"></div>
+<span>Approve connection in your wallet</span>
+</div>
) : (
<form className="publish-form" onSubmit={publishPackage}>
<div className="form-group">
@@ -248,33 +276,36 @@ export default function PublishPage() {
</div>

<div className="form-group">
-<label htmlFor="metadata-url">Metadata URL</label>
+<div style={{ display: 'flex', alignItems: 'center', gap: '4px' }}>
+<label>Metadata URL</label>
+<Tooltip content={<>add a link to metadata.json here (<a href="https://raw.githubusercontent.com/kinode-dao/kit/47cdf82f70b36f2a102ddfaaeed5efa10d7ef5b9/src/new/templates/rust/ui/chat/metadata.json" target="_blank" rel="noopener noreferrer">example link</a>)</>} />
+</div>
<input
-id="metadata-url"
type="text"
-required
value={metadataUrl}
onChange={(e) => setMetadataUrl(e.target.value)}
onBlur={calculateMetadataHash}
-placeholder="https://github/my-org/my-repo/metadata.json"
/>
-<p className="help-text">
-Metadata is a JSON file that describes your package.
-</p>
{metadataError && <p className="error-message">{metadataError}</p>}
</div>
<div className="form-group">
-<label htmlFor="metadata-hash">Metadata Hash</label>
+<label>Metadata Hash</label>
<input
readOnly
-id="metadata-hash"
type="text"
value={metadataHash}
placeholder="Calculated automatically from metadata URL"
/>
</div>
-<button type="submit" disabled={isConfirming || nameValidity !== null}>
+<button type="submit" disabled={isConfirming || nameValidity !== null || Boolean(metadataError)}>
-{isConfirming ? 'Publishing...' : 'Publish'}
+{isConfirming ? (
+<>
+<div className="loading-spinner small"></div>
+<span>Publishing...</span>
+</>
+) : (
+'Publish'
+)}
</button>
</form>
)}
@@ -293,21 +324,24 @@ export default function PublishPage() {
<div className="my-packages">
<h2>Packages You Own</h2>
{Object.keys(ourApps).length > 0 ? (
-<ul>
+<ul className="package-list">
{Object.values(ourApps).map((app) => (
<li key={`${app.package_id.package_name}:${app.package_id.publisher_node}`}>
<Link to={`/app/${app.package_id.package_name}:${app.package_id.publisher_node}`} className="app-name">
-{app.metadata?.name || app.package_id.package_name}
+{app.metadata?.image && (
+<img src={app.metadata.image} alt="" className="package-icon" />
+)}
+<span>{app.metadata?.name || app.package_id.package_name}</span>
</Link>

-<button onClick={() => unpublishPackage(app.package_id.package_name, app.package_id.publisher_node)}>
+<button onClick={() => unpublishPackage(app.package_id.package_name, app.package_id.publisher_node)} className="danger">
Unpublish
</button>
</li>
))}
</ul>
) : (
-<p>No packages published</p>
+<p className="no-packages">No packages published</p>
)}
</div>
</div>
@ -2,13 +2,15 @@ import React, { useState, useEffect } from "react";
|
|||||||
import useAppsStore from "../store";
|
import useAppsStore from "../store";
|
||||||
import { AppListing } from "../types/Apps";
|
import { AppListing } from "../types/Apps";
|
||||||
import { Link } from "react-router-dom";
|
import { Link } from "react-router-dom";
|
||||||
|
import { FaSearch } from "react-icons/fa";
|
||||||
|
|
||||||
export default function StorePage() {
|
export default function StorePage() {
|
||||||
const { listings, fetchListings } = useAppsStore();
|
const { listings, fetchListings, fetchUpdates } = useAppsStore();
|
||||||
const [searchQuery, setSearchQuery] = useState<string>("");
|
const [searchQuery, setSearchQuery] = useState<string>("");
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
fetchListings();
|
fetchListings();
|
||||||
|
fetchUpdates();
|
||||||
}, [fetchListings]);
|
}, [fetchListings]);
|
||||||
|
|
||||||
// extensive temp null handling due to weird prod bug
|
// extensive temp null handling due to weird prod bug
|
||||||
@ -25,12 +27,15 @@ export default function StorePage() {
|
|||||||
return (
|
return (
|
||||||
<div className="store-page">
|
<div className="store-page">
|
||||||
<div className="store-header">
|
<div className="store-header">
|
||||||
<input
|
<div className="search-bar">
|
||||||
type="text"
|
<input
|
||||||
placeholder="Search apps..."
|
type="text"
|
||||||
value={searchQuery}
|
placeholder="Search apps..."
|
||||||
onChange={(e) => setSearchQuery(e.target.value)}
|
value={searchQuery}
|
||||||
/>
|
onChange={(e) => setSearchQuery(e.target.value)}
|
||||||
|
/>
|
||||||
|
<FaSearch />
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<div className="app-list">
|
<div className="app-list">
|
||||||
{!listings ? (
|
{!listings ? (
|
||||||
|
@ -1,6 +1,6 @@
import { create } from 'zustand'
import { persist } from 'zustand/middleware'
import { PackageState, AppListing, MirrorCheckFile, DownloadItem, HomepageApp, ManifestResponse, Notification } from '../types/Apps'
import { PackageState, AppListing, MirrorCheckFile, DownloadItem, HomepageApp, ManifestResponse, Notification, UpdateInfo } from '../types/Apps'
import { HTTP_STATUS } from '../constants/http'
import KinodeClientApi from "@kinode/client-api"
import { WEBSOCKET_URL } from '../utils/ws'
@ -16,6 +16,7 @@ interface AppsStore {
notifications: Notification[]
homepageApps: HomepageApp[]
activeDownloads: Record<string, { downloaded: number, total: number }>
updates: Record<string, UpdateInfo>

fetchData: (id: string) => Promise<void>
fetchListings: () => Promise<void>
@ -26,6 +27,7 @@ interface AppsStore {
fetchOurApps: () => Promise<void>
fetchDownloadsForApp: (id: string) => Promise<DownloadItem[]>
checkMirror: (node: string) => Promise<MirrorCheckFile | null>
resetStore: () => Promise<void>

fetchHomepageApps: () => Promise<void>
getLaunchUrl: (id: string) => string | null
@ -48,6 +50,8 @@ interface AppsStore {
clearActiveDownload: (appId: string) => void
clearAllActiveDownloads: () => void;

fetchUpdates: () => Promise<void>
clearUpdates: (packageId: string) => Promise<void>
}

const useAppsStore = create<AppsStore>()((set, get) => ({
@ -58,7 +62,7 @@ const useAppsStore = create<AppsStore>()((set, get) => ({
activeDownloads: {},
homepageApps: [],
notifications: [],
updates: {},

fetchData: async (id: string) => {
if (!id) return;
@ -380,6 +384,55 @@ const useAppsStore = create<AppsStore>()((set, get) => ({
});
},

fetchUpdates: async () => {
try {
const res = await fetch(`${BASE_URL}/updates`);
if (res.status === HTTP_STATUS.OK) {
const updates = await res.json();
set({ updates });
}
} catch (error) {
console.error("Error fetching updates:", error);
}
},

clearUpdates: async (packageId: string) => {
try {
await fetch(`${BASE_URL}/updates/${packageId}/clear`, {
method: 'POST',
});
set((state) => {
const newUpdates = { ...state.updates };
delete newUpdates[packageId];
return { updates: newUpdates };
});
} catch (error) {
console.error("Error clearing updates:", error);
}
},

resetStore: async () => {
try {
const response = await fetch(`${BASE_URL}/reset`, {
method: 'POST',
});

if (!response.ok) {
throw new Error('Reset failed');
}

// Refresh the store data
await Promise.all([
get().fetchInstalled(),
get().fetchListings(),
get().fetchUpdates(),
]);
} catch (error) {
console.error('Reset failed:', error);
throw error;
}
},

ws: new KinodeClientApi({
uri: WEBSOCKET_URL,
nodeId: (window as any).our?.node,
@ -419,10 +472,26 @@ const useAppsStore = create<AppsStore>()((set, get) => ({
get().removeNotification(`download-${appId}`);

if (error) {
const formatDownloadError = (error: any): string => {
if (typeof error === 'object' && error !== null) {
if ('HashMismatch' in error) {
const { actual, desired } = error.HashMismatch;
return `Hash mismatch: expected ${desired.slice(0, 8)}..., got ${actual.slice(0, 8)}...`;
}
// Try to serialize the error object if it's not a HashMismatch
try {
return JSON.stringify(error);
} catch {
return String(error);
}
}
return String(error);
};

get().addNotification({
id: `error-${appId}`,
type: 'error',
message: `Download failed for ${package_id.package_name}: ${error}`,
message: `Download failed for ${package_id.package_name}: ${formatDownloadError(error)}`,
timestamp: Date.now(),
});
} else {

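A minimal sketch of how a component might consume the new update-related store actions (hypothetical component; the store shape follows the interface declared above, the import paths are assumptions).

import { useEffect } from "react";
import useAppsStore from "../store"; // assumed path

function UpdatesBadge({ packageId }: { packageId: string }) {
  const { updates, fetchUpdates, clearUpdates } = useAppsStore();

  useEffect(() => {
    // populate `updates` from GET /updates on mount
    fetchUpdates();
  }, [fetchUpdates]);

  const info = updates[packageId];
  if (!info) return null;

  return (
    // POST /updates/{packageId}/clear, then drop the local entry
    <button onClick={() => clearUpdates(packageId)}>Dismiss update notice</button>
  );
}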
@ -94,6 +94,35 @@ export interface HomepageApp {
favorite: boolean;
}

export interface HashMismatch {
desired: string;
actual: string;
}

export type DownloadError =
| "NoPackage"
| "NotMirroring"
| { HashMismatch: HashMismatch }
| "FileNotFound"
| "WorkerSpawnFailed"
| "HttpClientError"
| "BlobNotFound"
| "VfsError"
| { HandlingError: string }
| "Timeout"
| "InvalidManifest"
| "Offline";

export interface UpdateInfo {
errors: [string, DownloadError][]; // [url/node, error]
pending_manifest_hash: string | null;
}

export type Updates = {
[key: string]: { // package_id
[key: string]: UpdateInfo; // version_hash -> update info
};
};

export type NotificationActionType = 'click' | 'modal' | 'popup' | 'redirect';

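A small sketch (an assumption, not part of the diff) showing how the DownloadError union and UpdateInfo defined above might be consumed, e.g. to summarize why an auto-update is pending.

function describeError(err: DownloadError): string {
  if (typeof err === "string") return err; // e.g. "Timeout", "Offline"
  if ("HashMismatch" in err) {
    const { desired, actual } = err.HashMismatch;
    return `hash mismatch (expected ${desired}, got ${actual})`;
  }
  return `handling error: ${err.HandlingError}`;
}

function summarizeUpdate(info: UpdateInfo): string {
  const failures = info.errors
    .map(([source, err]) => `${source}: ${describeError(err)}`)
    .join("; ");
  return info.pending_manifest_hash
    ? `manifest ${info.pending_manifest_hash} awaiting approval`
    : failures || "up to date";
}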
@ -8,7 +8,7 @@ simulation-mode = []

[dependencies]
anyhow = "1.0"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
process_macros = "0.1"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
2
kinode/packages/chess/Cargo.lock
generated
@ -1813,7 +1813,7 @@ dependencies = [
[[package]]
name = "kinode_process_lib"
version = "0.10.0"
source = "git+https://github.com/kinode-dao/process_lib?rev=0443ece#0443ece2a5dfdbdc1b40db454a1535e1b1c1a1b3"
source = "git+https://github.com/kinode-dao/process_lib?rev=d97e012#d97e012842dd4cc0e036d5de5048064e770302ab"
dependencies = [
"alloy",
"alloy-primitives",
@ -1,7 +1,6 @@
interface chess {
/// Our "chess protocol" request/response format. We'll always serialize these
/// to a byte vector and send them over IPC.

variant request {
/// lazy-load-blob: none.
new-game(new-game-request),
@ -9,7 +9,7 @@ simulation-mode = []
[dependencies]
anyhow = "1.0"
bincode = "1.3.3"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
pleco = "0.5"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
2
kinode/packages/contacts/Cargo.lock
generated
@ -1774,7 +1774,7 @@ dependencies = [
[[package]]
name = "kinode_process_lib"
version = "0.10.0"
source = "git+https://github.com/kinode-dao/process_lib?rev=0443ece#0443ece2a5dfdbdc1b40db454a1535e1b1c1a1b3"
source = "git+https://github.com/kinode-dao/process_lib?rev=d97e012#d97e012842dd4cc0e036d5de5048064e770302ab"
dependencies = [
"alloy",
"alloy-primitives",
@ -7,24 +7,29 @@ interface contacts {
}

variant request {
/// requires ReadNameOnly capability
/// lazy-load-blob: none.
get-names, // requires read-names-only
get-names,
/// requires Read capability
/// lazy-load-blob: none.
get-all-contacts, // requires read
get-all-contacts,
/// requires Read capability
/// lazy-load-blob: none.
get-contact(string), // requires read
get-contact(string),
/// requires Add capability
/// lazy-load-blob: none.
add-contact(string),
/// requires Add capability
/// lazy-load-blob: none.
add-contact(string), // requires add
/// tuple<node, field, value>
///
add-field(tuple<string, string, string>),
/// requires Remove capability
/// lazy-load-blob: none.
add-field(tuple<string, string, string>), // requires add
remove-contact(string),
/// requires Remove capability
/// lazy-load-blob: none.
remove-contact(string), // requires remove
/// tuple<node, field>
///
remove-field(tuple<string, string>),
/// lazy-load-blob: none.
remove-field(tuple<string, string>), // requires remove
}

variant response {
@ -43,7 +48,6 @@ interface contacts {
/// lazy-load-blob: none.
remove-field,
/// any failed request will receive this response
///
/// lazy-load-blob: none.
err(string),
}
@ -7,7 +7,7 @@ edition = "2021"
simulation-mode = []

[dependencies]
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
process_macros = "0.1"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
@ -6,7 +6,7 @@ publish = false

[dependencies]
anyhow = "1.0"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
process_macros = "0.1"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
2
kinode/packages/homepage/Cargo.lock
generated
@ -1763,7 +1763,7 @@ dependencies = [
[[package]]
name = "kinode_process_lib"
version = "0.10.0"
source = "git+https://github.com/kinode-dao/process_lib?rev=0443ece#0443ece2a5dfdbdc1b40db454a1535e1b1c1a1b3"
source = "git+https://github.com/kinode-dao/process_lib?rev=d97e012#d97e012842dd4cc0e036d5de5048064e770302ab"
dependencies = [
"alloy",
"alloy-primitives",
@ -9,7 +9,7 @@ simulation-mode = []
[dependencies]
anyhow = "1.0"
bincode = "1.3.3"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
wit-bindgen = "0.36.0"
37
kinode/packages/kns-indexer/Cargo.lock
generated
@ -1,6 +1,6 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
version = 4

[[package]]
name = "addr2line"
@ -1761,7 +1761,7 @@ dependencies = [
[[package]]
name = "kinode_process_lib"
version = "0.10.0"
source = "git+https://github.com/kinode-dao/process_lib?rev=0443ece#0443ece2a5dfdbdc1b40db454a1535e1b1c1a1b3"
source = "git+https://github.com/kinode-dao/process_lib?rev=d97e012#d97e012842dd4cc0e036d5de5048064e770302ab"
dependencies = [
"alloy",
"alloy-primitives",
@ -1917,6 +1917,17 @@ dependencies = [
"tempfile",
]

[[package]]
name = "node_info"
version = "0.1.0"
dependencies = [
"kinode_process_lib",
"process_macros",
"serde",
"serde_json",
"wit-bindgen",
]

[[package]]
name = "num-bigint"
version = "0.4.6"
@ -2383,6 +2394,17 @@ dependencies = [
"windows-registry",
]

[[package]]
name = "reset"
version = "0.1.0"
dependencies = [
"kinode_process_lib",
"process_macros",
"serde",
"serde_json",
"wit-bindgen",
]

[[package]]
name = "rfc6979"
version = "0.4.0"
@ -2776,17 +2798,6 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"

[[package]]
name = "state"
version = "0.1.0"
dependencies = [
"kinode_process_lib",
"process_macros",
"serde",
"serde_json",
"wit-bindgen",
]

[[package]]
name = "static_assertions"
version = "1.1.0"
@ -3,8 +3,9 @@ resolver = "2"
members = [
"get-block",
"kns-indexer",
"state",
]
"reset",
"node-info",
]

[profile.release]
panic = "abort"
@ -14,16 +14,16 @@ interface kns-indexer {
/// returns an Option<KnsUpdate>
/// set block to 0 if you just want to get the current state of the indexer
node-info(node-info-request),
/// return the entire state of the indexer at the given block
/// set block to 0 if you just want to get the current state of the indexer
get-state(get-state-request),
/// resets and re-indexes the chain, requires root cap,
/// returns a response varaint reset
reset,
}

variant indexer-response {
name(option<string>),
node-info(option<wit-kns-update>),
get-state(wit-state),
reset(reset-result),
}

record namehash-to-name-request {
hash: string,
@ -35,10 +35,6 @@ interface kns-indexer {
block: u64,
}

record get-state-request {
block: u64,
}

record wit-kns-update {
name: string,
public-key: string,
@ -47,12 +43,14 @@ interface kns-indexer {
routers: list<string>,
}

record wit-state {
chain-id: u64,
contract-address: list<u8>, // 20-byte ETH address
names: list<tuple<string, string>>, // map, but wit doesn't support maps
nodes: list<tuple<string, wit-kns-update>>, // map, but wit doesn't support maps
last-block: u64,
variant reset-result {
success,
err(reset-error),
}

variant reset-error {
no-root-cap,
db-error(string),
}
}

@ -7,7 +7,7 @@ edition = "2021"
simulation-mode = []

[dependencies]
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
wit-bindgen = "0.36.0"
@ -11,7 +11,7 @@ anyhow = "1.0"
alloy-primitives = "0.8.15"
alloy-sol-types = "0.8.15"
hex = "0.4.3"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
process_macros = "0.1"
rmp-serde = "1.1.2"
serde = { version = "1.0", features = ["derive"] }
@ -1,17 +1,18 @@
use crate::kinode::process::kns_indexer::{
GetStateRequest, IndexerRequest, IndexerResponse, NamehashToNameRequest, NodeInfoRequest,
WitKnsUpdate, WitState,
IndexerRequest, IndexerResponse, NamehashToNameRequest, NodeInfoRequest, ResetError,
ResetResult, WitKnsUpdate,
};
use alloy_primitives::keccak256;
use alloy_sol_types::SolEvent;
use kinode_process_lib::{
await_message, call_init, eth, kimap, net, print_to_terminal, println, timer, Address, Message,
Request, Response,
await_message, call_init, eth, kimap,
kv::{self, Kv},
net, print_to_terminal, println, timer, Address, Capability, Message, Request, Response,
};
use serde::{Deserialize, Serialize};
use std::{
collections::{hash_map::HashMap, BTreeMap},
collections::BTreeMap,
net::{IpAddr, Ipv4Addr, Ipv6Addr},
str::FromStr,
};

wit_bindgen::generate!({
@ -36,57 +37,174 @@ const KIMAP_FIRST_BLOCK: u64 = kimap::KIMAP_FIRST_BLOCK; // optimism
|
|||||||
#[cfg(feature = "simulation-mode")]
|
#[cfg(feature = "simulation-mode")]
|
||||||
const KIMAP_FIRST_BLOCK: u64 = 1; // local
|
const KIMAP_FIRST_BLOCK: u64 = 1; // local
|
||||||
|
|
||||||
|
const CURRENT_VERSION: u32 = 1;
|
||||||
|
|
||||||
const MAX_PENDING_ATTEMPTS: u8 = 3;
|
const MAX_PENDING_ATTEMPTS: u8 = 3;
|
||||||
const SUBSCRIPTION_TIMEOUT: u64 = 60;
|
const SUBSCRIPTION_TIMEOUT: u64 = 60;
|
||||||
const DELAY_MS: u64 = 1_000; // 1s
|
const DELAY_MS: u64 = 1_000; // 1s
|
||||||
|
|
||||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
|
||||||
struct State {
|
struct State {
|
||||||
chain_id: u64,
|
/// version of the state in kv
|
||||||
// what contract this state pertains to
|
version: u32,
|
||||||
contract_address: eth::Address,
|
/// last block we have an update from
|
||||||
// namehash to human readable name
|
|
||||||
names: HashMap<String, String>,
|
|
||||||
// human readable name to most recent on-chain routing information as json
|
|
||||||
nodes: HashMap<String, net::KnsUpdate>,
|
|
||||||
// last block we have an update from
|
|
||||||
last_block: u64,
|
last_block: u64,
|
||||||
|
/// kv handle
|
||||||
|
/// includes keys and values for:
|
||||||
|
/// "meta:chain_id", "meta:version", "meta:last_block", "meta:contract_address",
|
||||||
|
/// "names:{namehash}" -> "{name}", "nodes:{name}" -> "{node_info}"
|
||||||
|
kv: Kv<String, Vec<u8>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<State> for WitState {
|
impl State {
|
||||||
fn from(s: State) -> Self {
|
fn new(our: &Address) -> Self {
|
||||||
let contract_address: [u8; 20] = s.contract_address.into();
|
let kv: Kv<String, Vec<u8>> = match kv::open(our.package_id(), "kns_indexer", Some(10)) {
|
||||||
WitState {
|
Ok(kv) => kv,
|
||||||
chain_id: s.chain_id.clone(),
|
Err(e) => panic!("fatal: error opening kns_indexer key_value database: {e:?}"),
|
||||||
contract_address: contract_address.to_vec(),
|
};
|
||||||
names: s
|
Self {
|
||||||
.names
|
version: CURRENT_VERSION,
|
||||||
.iter()
|
last_block: KIMAP_FIRST_BLOCK,
|
||||||
.map(|(k, v)| (k.clone(), v.clone()))
|
kv,
|
||||||
.collect::<Vec<_>>(),
|
|
||||||
nodes: s
|
|
||||||
.nodes
|
|
||||||
.iter()
|
|
||||||
.map(|(k, v)| (k.clone(), v.clone().into()))
|
|
||||||
.collect::<Vec<_>>(),
|
|
||||||
last_block: s.last_block.clone(),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
impl From<WitState> for State {
|
/// Loads the state from kv, and updates it with the current block number and version.
|
||||||
fn from(s: WitState) -> Self {
|
/// The result of this function will be that the constants for chain ID and contract address
|
||||||
let contract_address: [u8; 20] = s
|
/// are always matching the values in the kv.
|
||||||
.contract_address
|
fn load(our: &Address) -> Self {
|
||||||
.try_into()
|
let mut state = Self::new(our);
|
||||||
.expect("invalid contract addess: doesn't have 20 bytes");
|
|
||||||
State {
|
let desired_contract_address = eth::Address::from_str(KIMAP_ADDRESS).unwrap();
|
||||||
chain_id: s.chain_id.clone(),
|
|
||||||
contract_address: contract_address.into(),
|
let version = state.get_version();
|
||||||
names: HashMap::from_iter(s.names),
|
let chain_id = state.get_chain_id();
|
||||||
nodes: HashMap::from_iter(s.nodes.iter().map(|(k, v)| (k.clone(), v.clone().into()))),
|
let contract_address = state.get_contract_address();
|
||||||
last_block: s.last_block.clone(),
|
let last_block = state.get_last_block();
|
||||||
|
|
||||||
|
if version != Some(CURRENT_VERSION)
|
||||||
|
|| chain_id != Some(CHAIN_ID)
|
||||||
|
|| contract_address != Some(desired_contract_address)
|
||||||
|
{
|
||||||
|
// if version/contract/chain_id are new, run migrations here.
|
||||||
|
state.set_version(CURRENT_VERSION);
|
||||||
|
state.set_chain_id(CHAIN_ID);
|
||||||
|
state.set_contract_address(desired_contract_address);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
state.last_block = last_block.unwrap_or(state.last_block);
|
||||||
|
|
||||||
|
println!(
|
||||||
|
"\n 🐦⬛ KNS Indexer State\n\
|
||||||
|
▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔\n\
|
||||||
|
Version {}\n\
|
||||||
|
Chain ID {}\n\
|
||||||
|
Last Block {}\n\
|
||||||
|
KIMAP {}\n\
|
||||||
|
▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁\n",
|
||||||
|
state.version, state.last_block, CHAIN_ID, desired_contract_address,
|
||||||
|
);
|
||||||
|
|
||||||
|
state
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Reset by removing the database and reloading fresh state
|
||||||
|
fn reset(&self, our: &Address) {
|
||||||
|
// Remove the entire database
|
||||||
|
if let Err(e) = kv::remove_db(our.package_id(), "kns_indexer", None) {
|
||||||
|
println!("Warning: error removing kns_indexer database: {e:?}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn meta_version_key() -> String {
|
||||||
|
"meta:version".to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn meta_last_block_key() -> String {
|
||||||
|
"meta:last_block".to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn meta_chain_id_key() -> String {
|
||||||
|
"meta:chain_id".to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn meta_contract_address_key() -> String {
|
||||||
|
"meta:contract_address".to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn name_key(namehash: &str) -> String {
|
||||||
|
format!("name:{}", namehash)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn node_key(name: &str) -> String {
|
||||||
|
format!("node:{}", name)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_last_block(&self) -> Option<u64> {
|
||||||
|
self.kv.get_as::<u64>(&Self::meta_last_block_key()).ok()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn set_last_block(&mut self, block: u64) {
|
||||||
|
self.kv
|
||||||
|
.set_as::<u64>(&Self::meta_last_block_key(), &block, None)
|
||||||
|
.unwrap();
|
||||||
|
self.last_block = block;
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_version(&self) -> Option<u32> {
|
||||||
|
self.kv.get_as::<u32>(&Self::meta_version_key()).ok()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn set_version(&mut self, version: u32) {
|
||||||
|
self.kv
|
||||||
|
.set_as::<u32>(&Self::meta_version_key(), &version, None)
|
||||||
|
.unwrap();
|
||||||
|
self.version = version;
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_name(&self, namehash: &str) -> Option<String> {
|
||||||
|
self.kv
|
||||||
|
.get(&Self::name_key(namehash))
|
||||||
|
.ok()
|
||||||
|
.and_then(|bytes| String::from_utf8(bytes).ok())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn set_name(&mut self, namehash: &str, name: &str) {
|
||||||
|
self.kv
|
||||||
|
.set(&Self::name_key(namehash), &name.as_bytes().to_vec(), None)
|
||||||
|
.unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_node(&self, name: &str) -> Option<net::KnsUpdate> {
|
||||||
|
self.kv.get_as::<net::KnsUpdate>(&Self::node_key(name)).ok()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn set_node(&mut self, name: &str, node: &net::KnsUpdate) {
|
||||||
|
self.kv
|
||||||
|
.set_as::<net::KnsUpdate>(&Self::node_key(name), &node, None)
|
||||||
|
.unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_chain_id(&self) -> Option<u64> {
|
||||||
|
self.kv.get_as::<u64>(&Self::meta_chain_id_key()).ok()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn set_chain_id(&mut self, chain_id: u64) {
|
||||||
|
self.kv
|
||||||
|
.set_as::<u64>(&Self::meta_chain_id_key(), &chain_id, None)
|
||||||
|
.unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_contract_address(&self) -> Option<eth::Address> {
|
||||||
|
self.kv
|
||||||
|
.get_as::<eth::Address>(&Self::meta_contract_address_key())
|
||||||
|
.ok()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn set_contract_address(&mut self, contract_address: eth::Address) {
|
||||||
|
self.kv
|
||||||
|
.set_as::<eth::Address>(&Self::meta_contract_address_key(), &contract_address, None)
|
||||||
|
.expect("Failed to set contract address");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -126,20 +244,8 @@ enum KnsError {

call_init!(init);
fn init(our: Address) {
println!("indexing on contract address {KIMAP_ADDRESS}");
// we **can** persist PKI state between boots but with current size, it's
// more robust just to reload the whole thing. the new contracts will allow
// us to quickly verify we have the updated mapping with root hash, but right
// now it's tricky to recover from missed events.

let state = State {
chain_id: CHAIN_ID,
contract_address: KIMAP_ADDRESS.parse::<eth::Address>().unwrap(),
nodes: HashMap::new(),
names: HashMap::new(),
last_block: KIMAP_FIRST_BLOCK,
};
// state is loaded from kv, and updated with the current block number and version.
let state = State::load(&our);

if let Err(e) = main(our, state) {
println!("fatal error: {e}");
@ -150,42 +256,37 @@ fn main(our: Address, mut state: State) -> anyhow::Result<()> {
|
|||||||
#[cfg(feature = "simulation-mode")]
|
#[cfg(feature = "simulation-mode")]
|
||||||
add_temp_hardcoded_tlzs(&mut state);
|
add_temp_hardcoded_tlzs(&mut state);
|
||||||
|
|
||||||
|
let chain_id = CHAIN_ID;
|
||||||
|
let kimap_address = eth::Address::from_str(KIMAP_ADDRESS).unwrap();
|
||||||
|
|
||||||
// sub_id: 1
|
// sub_id: 1
|
||||||
|
// listen to all mint events in kimap
|
||||||
let mints_filter = eth::Filter::new()
|
let mints_filter = eth::Filter::new()
|
||||||
.address(state.contract_address)
|
.address(kimap_address)
|
||||||
|
.from_block(state.last_block)
|
||||||
.to_block(eth::BlockNumberOrTag::Latest)
|
.to_block(eth::BlockNumberOrTag::Latest)
|
||||||
.event("Mint(bytes32,bytes32,bytes,bytes)");
|
.event("Mint(bytes32,bytes32,bytes,bytes)");
|
||||||
|
|
||||||
let notes = vec![
|
|
||||||
keccak256("~ws-port"),
|
|
||||||
keccak256("~tcp-port"),
|
|
||||||
keccak256("~net-key"),
|
|
||||||
keccak256("~routers"),
|
|
||||||
keccak256("~ip"),
|
|
||||||
];
|
|
||||||
|
|
||||||
// sub_id: 2
|
// sub_id: 2
|
||||||
|
// listen to all note events that are relevant to the KNS protocol within kimap
|
||||||
let notes_filter = eth::Filter::new()
|
let notes_filter = eth::Filter::new()
|
||||||
.address(state.contract_address)
|
.address(kimap_address)
|
||||||
|
.from_block(state.last_block)
|
||||||
.to_block(eth::BlockNumberOrTag::Latest)
|
.to_block(eth::BlockNumberOrTag::Latest)
|
||||||
.event("Note(bytes32,bytes32,bytes,bytes,bytes)")
|
.event("Note(bytes32,bytes32,bytes,bytes,bytes)")
|
||||||
.topic3(notes);
|
.topic3(vec![
|
||||||
|
keccak256("~ws-port"),
|
||||||
|
keccak256("~tcp-port"),
|
||||||
|
keccak256("~net-key"),
|
||||||
|
keccak256("~routers"),
|
||||||
|
keccak256("~ip"),
|
||||||
|
]);
|
||||||
|
|
||||||
// 60s timeout -- these calls can take a long time
|
// 60s timeout -- these calls can take a long time
|
||||||
// if they do time out, we try them again
|
// if they do time out, we try them again
|
||||||
let eth_provider: eth::Provider = eth::Provider::new(state.chain_id, SUBSCRIPTION_TIMEOUT);
|
let eth_provider: eth::Provider = eth::Provider::new(chain_id, SUBSCRIPTION_TIMEOUT);
|
||||||
|
|
||||||
print_to_terminal(
|
|
||||||
1,
|
|
||||||
&format!(
|
|
||||||
"subscribing, state.block: {}, chain_id: {}",
|
|
||||||
state.last_block - 1,
|
|
||||||
state.chain_id
|
|
||||||
),
|
|
||||||
);
|
|
||||||
|
|
||||||
// subscribe to logs first, so no logs are missed
|
// subscribe to logs first, so no logs are missed
|
||||||
println!("subscribing to new logs...");
|
|
||||||
eth_provider.subscribe_loop(1, mints_filter.clone());
|
eth_provider.subscribe_loop(1, mints_filter.clone());
|
||||||
eth_provider.subscribe_loop(2, notes_filter.clone());
|
eth_provider.subscribe_loop(2, notes_filter.clone());
|
||||||
|
|
||||||
@ -198,7 +299,7 @@ fn main(our: Address, mut state: State) -> anyhow::Result<()> {
|
|||||||
let mut pending_notes: BTreeMap<u64, Vec<(kimap::contract::Note, u8)>> = BTreeMap::new();
|
let mut pending_notes: BTreeMap<u64, Vec<(kimap::contract::Note, u8)>> = BTreeMap::new();
|
||||||
|
|
||||||
// if block in state is < current_block, get logs from that part.
|
// if block in state is < current_block, get logs from that part.
|
||||||
println!("syncing old logs...");
|
println!("syncing old logs from block: {}", state.last_block);
|
||||||
fetch_and_process_logs(
|
fetch_and_process_logs(
|
||||||
ð_provider,
|
ð_provider,
|
||||||
&mut state,
|
&mut state,
|
||||||
@ -211,6 +312,7 @@ fn main(our: Address, mut state: State) -> anyhow::Result<()> {
|
|||||||
notes_filter.clone(),
|
notes_filter.clone(),
|
||||||
&mut pending_notes,
|
&mut pending_notes,
|
||||||
);
|
);
|
||||||
|
|
||||||
// set a timer tick so any pending logs will be processed
|
// set a timer tick so any pending logs will be processed
|
||||||
timer::set_timer(DELAY_MS, None);
|
timer::set_timer(DELAY_MS, None);
|
||||||
println!("done syncing old logs.");
|
println!("done syncing old logs.");
|
||||||
@ -219,9 +321,16 @@ fn main(our: Address, mut state: State) -> anyhow::Result<()> {
let Ok(message) = await_message() else {
continue;
};

// if true, time to go check current block number and handle pending notes.
let tick = message.is_local(&our) && message.source().process == "timer:distro:sys";
let Message::Request { source, body, .. } = message else {
let Message::Request {
source,
body,
capabilities,
..
} = message
else {
if tick {
handle_eth_message(
&mut state,
@ -236,7 +345,7 @@ fn main(our: Address, mut state: State) -> anyhow::Result<()> {
continue;
};

if source.process == "eth:distro:sys" {
if source.node() == our.node() && source.process == "eth:distro:sys" {
handle_eth_message(
&mut state,
&eth_provider,
@ -247,28 +356,45 @@ fn main(our: Address, mut state: State) -> anyhow::Result<()> {
&notes_filter,
)?;
} else {
let request = serde_json::from_slice(&body)?;
match serde_json::from_slice(&body)? {

match request {
IndexerRequest::NamehashToName(NamehashToNameRequest { ref hash, .. }) => {
// TODO: make sure we've seen the whole block, while actually
// sending a response to the proper place.
Response::new()
.body(IndexerResponse::Name(state.names.get(hash).cloned()))
.body(IndexerResponse::Name(state.get_name(hash)))
.send()?;
}

IndexerRequest::NodeInfo(NodeInfoRequest { ref name, .. }) => {
Response::new()
.body(IndexerResponse::NodeInfo(
.body(&IndexerResponse::NodeInfo(
state.nodes.get(name).map(|n| n.clone().into()),
state
.get_node(name)
.map(|update| WitKnsUpdate::from(update)),
))
.send()?;
}
IndexerRequest::GetState(GetStateRequest { .. }) => {
IndexerRequest::Reset => {
// check for root capability
let root_cap = Capability {
issuer: our.clone(),
params: "{\"root\":true}".to_string(),
};
if source.package_id() != our.package_id() {
if !capabilities.contains(&root_cap) {
Response::new()
.body(IndexerResponse::Reset(ResetResult::Err(
ResetError::NoRootCap,
)))
.send()?;
continue;
}
}
// reload state fresh - this will create new db
state.reset(&our);
Response::new()
.body(IndexerResponse::GetState(state.clone().into()))
.body(IndexerResponse::Reset(ResetResult::Success))
.send()?;
panic!("resetting state, restarting!");
}
}
}
@ -304,11 +430,12 @@ fn handle_eth_message(
|
|||||||
}
|
}
|
||||||
_ => {}
|
_ => {}
|
||||||
}
|
}
|
||||||
|
|
||||||
if tick {
|
if tick {
|
||||||
let block_number = eth_provider.get_block_number();
|
let block_number = eth_provider.get_block_number();
|
||||||
if let Ok(block_number) = block_number {
|
if let Ok(block_number) = block_number {
|
||||||
print_to_terminal(2, &format!("new block: {}", block_number));
|
print_to_terminal(2, &format!("new block: {}", block_number));
|
||||||
state.last_block = block_number;
|
state.set_last_block(block_number);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
handle_pending_notes(state, pending_notes)?;
|
handle_pending_notes(state, pending_notes)?;
|
||||||
@ -346,15 +473,9 @@ fn handle_pending_notes(
|
|||||||
None => {
|
None => {
|
||||||
print_to_terminal(1, &format!("pending note handling error: {e:?}"))
|
print_to_terminal(1, &format!("pending note handling error: {e:?}"))
|
||||||
}
|
}
|
||||||
Some(ee) => match ee {
|
Some(KnsError::NoParentError) => {
|
||||||
KnsError::NoParentError => {
|
keep_notes.push((note, attempt + 1));
|
||||||
// print_to_terminal(
|
}
|
||||||
// 1,
|
|
||||||
// &format!("note still awaiting mint; attempt {attempt}"),
|
|
||||||
// );
|
|
||||||
keep_notes.push((note, attempt + 1));
|
|
||||||
}
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -381,69 +502,53 @@ fn handle_note(state: &mut State, note: &kimap::contract::Note) -> anyhow::Resul
|
|||||||
if !kimap::valid_note(¬e_label) {
|
if !kimap::valid_note(¬e_label) {
|
||||||
return Err(anyhow::anyhow!("skipping invalid note: {note_label}"));
|
return Err(anyhow::anyhow!("skipping invalid note: {note_label}"));
|
||||||
}
|
}
|
||||||
|
let Some(node_name) = state.get_name(&node_hash) else {
|
||||||
let Some(node_name) = get_parent_name(&state.names, &node_hash) else {
|
|
||||||
return Err(KnsError::NoParentError.into());
|
return Err(KnsError::NoParentError.into());
|
||||||
};
|
};
|
||||||
|
|
||||||
match note_label.as_str() {
|
if let Some(mut node) = state.get_node(&node_name) {
|
||||||
"~ws-port" => {
|
match note_label.as_str() {
|
||||||
let ws = bytes_to_port(¬e.data)?;
|
"~ws-port" => {
|
||||||
if let Some(node) = state.nodes.get_mut(&node_name) {
|
let ws = bytes_to_port(¬e.data)?;
|
||||||
node.ports.insert("ws".to_string(), ws);
|
node.ports.insert("ws".to_string(), ws);
|
||||||
// port defined, -> direct
|
node.routers = vec![]; // port defined, -> direct
|
||||||
node.routers = vec![];
|
|
||||||
}
|
}
|
||||||
}
|
"~tcp-port" => {
|
||||||
"~tcp-port" => {
|
let tcp = bytes_to_port(¬e.data)?;
|
||||||
let tcp = bytes_to_port(¬e.data)?;
|
|
||||||
if let Some(node) = state.nodes.get_mut(&node_name) {
|
|
||||||
node.ports.insert("tcp".to_string(), tcp);
|
node.ports.insert("tcp".to_string(), tcp);
|
||||||
// port defined, -> direct
|
node.routers = vec![]; // port defined, -> direct
|
||||||
node.routers = vec![];
|
|
||||||
}
|
}
|
||||||
}
|
"~net-key" => {
|
||||||
"~net-key" => {
|
if note.data.len() != 32 {
|
||||||
if note.data.len() != 32 {
|
return Err(anyhow::anyhow!("invalid net-key length"));
|
||||||
return Err(anyhow::anyhow!("invalid net-key length"));
|
}
|
||||||
}
|
|
||||||
if let Some(node) = state.nodes.get_mut(&node_name) {
|
|
||||||
node.public_key = hex::encode(¬e.data);
|
node.public_key = hex::encode(¬e.data);
|
||||||
}
|
}
|
||||||
}
|
"~routers" => {
|
||||||
"~routers" => {
|
let routers = decode_routers(¬e.data, state);
|
||||||
let routers = decode_routers(¬e.data, state);
|
|
||||||
if let Some(node) = state.nodes.get_mut(&node_name) {
|
|
||||||
node.routers = routers;
|
node.routers = routers;
|
||||||
// -> indirect
|
node.ports = BTreeMap::new(); // -> indirect
|
||||||
node.ports = BTreeMap::new();
|
|
||||||
node.ips = vec![];
|
node.ips = vec![];
|
||||||
}
|
}
|
||||||
}
|
"~ip" => {
|
||||||
"~ip" => {
|
let ip = bytes_to_ip(¬e.data)?;
|
||||||
let ip = bytes_to_ip(¬e.data)?;
|
|
||||||
if let Some(node) = state.nodes.get_mut(&node_name) {
|
|
||||||
node.ips = vec![ip.to_string()];
|
node.ips = vec![ip.to_string()];
|
||||||
// -> direct
|
node.routers = vec![]; // -> direct
|
||||||
node.routers = vec![];
|
}
|
||||||
|
_other => {
|
||||||
|
// Ignore unknown notes
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
_other => {
|
|
||||||
// Ignore unknown notes
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// only send an update if we have a *full* set of data for networking:
|
// Update the node in the state
|
||||||
// a node name, plus either <routers> or <ip, port(s)>
|
state.set_node(&node_name, &node);
|
||||||
if let Some(node_info) = state.nodes.get(&node_name) {
|
|
||||||
if !node_info.public_key.is_empty()
|
// Only send an update if we have a *full* set of data for networking
|
||||||
&& ((!node_info.ips.is_empty() && !node_info.ports.is_empty())
|
if !node.public_key.is_empty()
|
||||||
|| node_info.routers.len() > 0)
|
&& ((!node.ips.is_empty() && !node.ports.is_empty()) || !node.routers.is_empty())
|
||||||
{
|
{
|
||||||
Request::to(("our", "net", "distro", "sys"))
|
Request::to(("our", "net", "distro", "sys"))
|
||||||
.body(rmp_serde::to_vec(&net::NetAction::KnsUpdate(
|
.body(rmp_serde::to_vec(&net::NetAction::KnsUpdate(node))?)
|
||||||
node_info.clone(),
|
|
||||||
))?)
|
|
||||||
.send()?;
|
.send()?;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -457,7 +562,7 @@ fn handle_log(
|
|||||||
log: ð::Log,
|
log: ð::Log,
|
||||||
) -> anyhow::Result<()> {
|
) -> anyhow::Result<()> {
|
||||||
if let Some(block) = log.block_number {
|
if let Some(block) = log.block_number {
|
||||||
state.last_block = block;
|
state.set_last_block(block);
|
||||||
}
|
}
|
||||||
|
|
||||||
match log.topics()[0] {
|
match log.topics()[0] {
|
||||||
@ -471,15 +576,15 @@ fn handle_log(
|
|||||||
return Err(anyhow::anyhow!("skipping invalid name: {name}"));
|
return Err(anyhow::anyhow!("skipping invalid name: {name}"));
|
||||||
}
|
}
|
||||||
|
|
||||||
let full_name = match get_parent_name(&state.names, &parent_hash) {
|
let full_name = match state.get_name(&parent_hash) {
|
||||||
Some(parent_name) => format!("{name}.{parent_name}"),
|
Some(parent_name) => format!("{name}.{parent_name}"),
|
||||||
None => name,
|
None => name,
|
||||||
};
|
};
|
||||||
|
|
||||||
state.names.insert(child_hash.clone(), full_name.clone());
|
state.set_name(&child_hash.clone(), &full_name.clone());
|
||||||
state.nodes.insert(
|
state.set_node(
|
||||||
full_name.clone(),
|
&full_name.clone(),
|
||||||
net::KnsUpdate {
|
&net::KnsUpdate {
|
||||||
name: full_name.clone(),
|
name: full_name.clone(),
|
||||||
public_key: String::new(),
|
public_key: String::new(),
|
||||||
ips: Vec::new(),
|
ips: Vec::new(),
|
||||||
@ -519,18 +624,17 @@ fn handle_log(
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
// helpers
|
/// Get logs for a filter then process them while taking pending notes into account.
|
||||||
|
|
||||||
fn fetch_and_process_logs(
|
fn fetch_and_process_logs(
|
||||||
eth_provider: ð::Provider,
|
eth_provider: ð::Provider,
|
||||||
state: &mut State,
|
state: &mut State,
|
||||||
filter: eth::Filter,
|
filter: eth::Filter,
|
||||||
pending_notes: &mut BTreeMap<u64, Vec<(kimap::contract::Note, u8)>>,
|
pending_notes: &mut BTreeMap<u64, Vec<(kimap::contract::Note, u8)>>,
|
||||||
) {
|
) {
|
||||||
let filter = filter.from_block(KIMAP_FIRST_BLOCK);
|
|
||||||
loop {
|
loop {
|
||||||
match eth_provider.get_logs(&filter) {
|
match eth_provider.get_logs(&filter) {
|
||||||
Ok(logs) => {
|
Ok(logs) => {
|
||||||
|
println!("log len: {}", logs.len());
|
||||||
for log in logs {
|
for log in logs {
|
||||||
if let Err(e) = handle_log(state, pending_notes, &log) {
|
if let Err(e) = handle_log(state, pending_notes, &log) {
|
||||||
print_to_terminal(1, &format!("log-handling error! {e:?}"));
|
print_to_terminal(1, &format!("log-handling error! {e:?}"));
|
||||||
@ -546,52 +650,23 @@ fn fetch_and_process_logs(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_parent_name(names: &HashMap<String, String>, parent_hash: &str) -> Option<String> {
|
|
||||||
let mut current_hash = parent_hash;
|
|
||||||
let mut components = Vec::new(); // Collect components in a vector
|
|
||||||
let mut visited_hashes = std::collections::HashSet::new();
|
|
||||||
|
|
||||||
while let Some(parent_name) = names.get(current_hash) {
|
|
||||||
if !visited_hashes.insert(current_hash) {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
if !parent_name.is_empty() {
|
|
||||||
components.push(parent_name.clone());
|
|
||||||
}
|
|
||||||
|
|
||||||
// Update current_hash to the parent's hash for the next iteration
|
|
||||||
if let Some(new_parent_hash) = names.get(parent_name) {
|
|
||||||
current_hash = new_parent_hash;
|
|
||||||
} else {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if components.is_empty() {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
|
|
||||||
components.reverse();
|
|
||||||
Some(components.join("."))
|
|
||||||
}
|
|
||||||
|
|
||||||
// TEMP. Either remove when event reimitting working with anvil,
|
// TEMP. Either remove when event reimitting working with anvil,
|
||||||
// or refactor into better structure(!)
|
// or refactor into better structure(!)
|
||||||
#[cfg(feature = "simulation-mode")]
|
#[cfg(feature = "simulation-mode")]
|
||||||
fn add_temp_hardcoded_tlzs(state: &mut State) {
|
fn add_temp_hardcoded_tlzs(state: &mut State) {
|
||||||
// add some hardcoded top level zones
|
// add some hardcoded top level zones
|
||||||
state.names.insert(
|
state.set_name(
|
||||||
"0xdeeac81ae11b64e7cab86d089c306e5d223552a630f02633ce170d2786ff1bbd".to_string(),
|
&"0xdeeac81ae11b64e7cab86d089c306e5d223552a630f02633ce170d2786ff1bbd".to_string(),
|
||||||
"os".to_string(),
|
&"os".to_string(),
|
||||||
);
|
);
|
||||||
state.names.insert(
|
state.set_name(
|
||||||
"0x137d9e4cc0479164d40577620cb3b41b083c6e8dbf58f8523be76d207d6fd8ea".to_string(),
|
&"0x137d9e4cc0479164d40577620cb3b41b083c6e8dbf58f8523be76d207d6fd8ea".to_string(),
|
||||||
"dev".to_string(),
|
&"dev".to_string(),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Decodes bytes into an array of keccak256 hashes (32 bytes each) and returns their full names.
|
/// Decodes bytes under ~routers in kimap into an array of keccak256 hashes (32 bytes each)
|
||||||
|
/// and returns the associated node identities.
|
||||||
fn decode_routers(data: &[u8], state: &State) -> Vec<String> {
|
fn decode_routers(data: &[u8], state: &State) -> Vec<String> {
|
||||||
if data.len() % 32 != 0 {
|
if data.len() % 32 != 0 {
|
||||||
print_to_terminal(
|
print_to_terminal(
|
||||||
@ -605,7 +680,7 @@ fn decode_routers(data: &[u8], state: &State) -> Vec<String> {
|
|||||||
for chunk in data.chunks(32) {
|
for chunk in data.chunks(32) {
|
||||||
let hash_str = format!("0x{}", hex::encode(chunk));
|
let hash_str = format!("0x{}", hex::encode(chunk));
|
||||||
|
|
||||||
match state.names.get(&hash_str) {
|
match state.get_name(&hash_str) {
|
||||||
Some(full_name) => routers.push(full_name.clone()),
|
Some(full_name) => routers.push(full_name.clone()),
|
||||||
None => print_to_terminal(
|
None => print_to_terminal(
|
||||||
1,
|
1,
|
||||||
@ -617,6 +692,7 @@ fn decode_routers(data: &[u8], state: &State) -> Vec<String> {
|
|||||||
routers
|
routers
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// convert IP address stored at ~ip in kimap to IpAddr
|
||||||
pub fn bytes_to_ip(bytes: &[u8]) -> anyhow::Result<IpAddr> {
|
pub fn bytes_to_ip(bytes: &[u8]) -> anyhow::Result<IpAddr> {
|
||||||
match bytes.len() {
|
match bytes.len() {
|
||||||
4 => {
|
4 => {
|
||||||
@ -633,6 +709,7 @@ pub fn bytes_to_ip(bytes: &[u8]) -> anyhow::Result<IpAddr> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// convert port stored at ~[protocol]-port in kimap to u16
|
||||||
pub fn bytes_to_port(bytes: &[u8]) -> anyhow::Result<u16> {
|
pub fn bytes_to_port(bytes: &[u8]) -> anyhow::Result<u16> {
|
||||||
match bytes.len() {
|
match bytes.len() {
|
||||||
2 => Ok(u16::from_be_bytes([bytes[0], bytes[1]])),
|
2 => Ok(u16::from_be_bytes([bytes[0], bytes[1]])),
|
||||||
|
20
kinode/packages/kns-indexer/node-info/Cargo.toml
Normal file
@ -0,0 +1,20 @@
[package]
name = "node_info"
version = "0.1.0"
edition = "2021"

[features]
simulation-mode = []

[dependencies]
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
process_macros = "0.1"
wit-bindgen = "0.36.0"

[lib]
crate-type = ["cdylib"]

[package.metadata.component]
package = "kinode:process"
35
kinode/packages/kns-indexer/node-info/src/lib.rs
Normal file
@ -0,0 +1,35 @@
use kinode::process::kns_indexer::{IndexerRequest, IndexerResponse, NodeInfoRequest};
use kinode_process_lib::{println, script, Address, Request};
use std::str::FromStr;

wit_bindgen::generate!({
path: "target/wit",
world: "kns-indexer-sys-v0",
generate_unused_types: true,
additional_derives: [serde::Deserialize, serde::Serialize, process_macros::SerdeJsonInto],
});

script!(init);
fn init(_our: Address, args: String) -> String {
let node_name = args.split_whitespace().next().unwrap_or("").to_string();

let kns = Address::from_str("our@kns-indexer:kns-indexer:sys").unwrap();

let resp = Request::to(kns)
.body(IndexerRequest::NodeInfo(NodeInfoRequest {
name: node_name,
block: 0,
}))
.send_and_await_response(5)
.unwrap()
.unwrap();

let resp = serde_json::from_slice::<IndexerResponse>(&resp.body()).unwrap();

match resp {
IndexerResponse::NodeInfo(node_info) => {
format!("node info: {node_info:#?}")
}
_ => "node info: name not found".to_string(),
}
}
@ -8,12 +8,14 @@
"eth:distro:sys",
"http-server:distro:sys",
"net:distro:sys",
"timer:distro:sys"
"timer:distro:sys",
"kv:distro:sys"
],
"grant_capabilities": [
"eth:distro:sys",
"http-server:distro:sys",
"timer:distro:sys"
"timer:distro:sys",
"kv:distro:sys"
],
"public": false
}
@ -11,7 +11,25 @@
],
"wit_version": 1
},
"state.wasm": {
"reset.wasm": {
"root": false,
"public": false,
"request_networking": false,
"request_capabilities": [
"kns-indexer:kns-indexer:sys",
{
"process": "kns-indexer:kns-indexer:sys",
"params": {
"root": true
}
}
],
"grant_capabilities": [
"kns-indexer:kns-indexer:sys"
],
"wit_version": 1
},
"node-info.wasm": {
"root": false,
"public": false,
"request_networking": false,
@ -1,5 +1,5 @@
[package]
name = "state"
name = "reset"
version = "0.1.0"
edition = "2021"

@ -7,10 +7,10 @@ edition = "2021"
simulation-mode = []

[dependencies]
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
process_macros = "0.1"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
process_macros = "0.1"
wit-bindgen = "0.36.0"

[lib]
23
kinode/packages/kns-indexer/reset/src/lib.rs
Normal file
@ -0,0 +1,23 @@
use std::str::FromStr;

use kinode::process::kns_indexer::IndexerRequest;
use kinode_process_lib::{call_init, Address, Request};

wit_bindgen::generate!({
path: "target/wit",
world: "kns-indexer-sys-v0",
generate_unused_types: true,
additional_derives: [serde::Deserialize, serde::Serialize, process_macros::SerdeJsonInto],
});

call_init!(init);
fn init(_our: Address) {
// request timeout of 5s
let kns = Address::from_str("our@kns-indexer:kns-indexer:sys").unwrap();

let _resp = Request::to(kns)
.body(IndexerRequest::Reset)
.send_and_await_response(5)
.unwrap()
.unwrap();
}
@ -1,46 +0,0 @@
use crate::kinode::process::kns_indexer::{GetStateRequest, IndexerRequest, IndexerResponse};
use kinode_process_lib::{eth, script, Address, Message, Request};

wit_bindgen::generate!({
path: "target/wit",
world: "kns-indexer-sys-v0",
generate_unused_types: true,
additional_derives: [serde::Deserialize, serde::Serialize, process_macros::SerdeJsonInto],
});

script!(init);
fn init(_our: Address, _args: String) -> String {
// we don't take any args

let Ok(Message::Response { body, .. }) =
Request::to(("our", "kns-indexer", "kns-indexer", "sys"))
.body(IndexerRequest::GetState(GetStateRequest { block: 0 }))
.send_and_await_response(10)
.unwrap()
else {
return "failed to get state from kns-indexer".to_string();
};
let Ok(IndexerResponse::GetState(state)) = body.try_into() else {
return "failed to deserialize state".to_string();
};
// can change later, but for now, just print every known node name
let mut names = state
.names
.iter()
.map(|(_k, v)| v.clone())
.collect::<Vec<_>>();
names.sort();
let contract_address: [u8; 20] = state
.contract_address
.try_into()
.expect("invalid contract addess: doesn't have 20 bytes");
let contract_address: eth::Address = contract_address.into();
format!(
"\nrunning on chain id {}\nCA: {}\n{} known nodes as of block {}\n {}",
state.chain_id,
contract_address,
names.len(),
state.last_block,
names.join("\n ")
)
}
2
kinode/packages/settings/Cargo.lock
generated
@@ -1751,7 +1751,7 @@ dependencies = [
 [[package]]
 name = "kinode_process_lib"
 version = "0.10.0"
-source = "git+https://github.com/kinode-dao/process_lib?rev=0443ece#0443ece2a5dfdbdc1b40db454a1535e1b1c1a1b3"
+source = "git+https://github.com/kinode-dao/process_lib?rev=d97e012#d97e012842dd4cc0e036d5de5048064e770302ab"
 dependencies = [
  "alloy",
  "alloy-primitives",
@@ -19,6 +19,13 @@
         },
         "http-server:distro:sys",
         "kernel:distro:sys",
+        "kns-indexer:kns-indexer:sys",
+        {
+          "process": "kns-indexer:kns-indexer:sys",
+          "params": {
+            "root": true
+          }
+        },
         "net:distro:sys",
         "vfs:distro:sys",
         {
@@ -165,7 +165,7 @@
       margin-left: 6px;
     }
   </style>
-  <script type="module" crossorigin src="/settings:settings:sys/assets/index-BVR8Atdy.js"></script>
+  <script type="module" crossorigin src="/settings:settings:sys/assets/index-CwCaX2Ut.js"></script>
   <link rel="stylesheet" crossorigin href="/settings:settings:sys/assets/index-iGirBDd0.css">
 </head>
 
@@ -10,7 +10,7 @@ simulation-mode = []
 anyhow = "1.0"
 base64 = "0.22.0"
 bincode = "1.3.3"
-kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
+kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
 rmp-serde = "1.2.0"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
@@ -1,10 +1,10 @@
 use kinode_process_lib::{
-    await_message, call_init, eth, get_blob, homepage, http, kernel_types, kimap, net, println,
-    Address, Capability, LazyLoadBlob, Message, NodeId, ProcessId, Request, Response, SendError,
-    SendErrorKind,
+    await_message, call_init, eth, get_blob, get_capability, homepage, http, kernel_types, kimap,
+    net, println, Address, Capability, LazyLoadBlob, Message, NodeId, ProcessId, Request, Response,
+    SendError, SendErrorKind,
 };
 use serde::{Deserialize, Serialize};
-use std::collections::HashMap;
+use std::{collections::HashMap, vec};
 
 const ICON: &str = include_str!("icon");
 
@@ -18,6 +18,7 @@ enum SettingsRequest {
     PeerId(NodeId),
     EthConfig(eth::EthConfigAction),
     Shutdown,
+    Reset,
     KillProcess(ProcessId),
     SetStylesheet(String),
 }
@@ -464,6 +465,19 @@ fn handle_settings_request(
                 .send()
                 .unwrap();
         }
+        SettingsRequest::Reset => {
+            // reset KNS
+            let kns_address = Address::new(&state.our.node, ("kns-indexer", "kns-indexer", "sys"));
+            let root_cap = get_capability(&kns_address, "{\"root\":true}");
+
+            if let Some(cap) = root_cap {
+                Request::to(("our", "kns-indexer", "kns-indexer", "sys"))
+                    .body(serde_json::to_vec(&SettingsRequest::Reset).unwrap())
+                    .capabilities(vec![cap])
+                    .send()
+                    .unwrap();
+            }
+        }
         SettingsRequest::KillProcess(pid) => {
             // kill a process
             if let Err(_) = Request::to(("our", "kernel", "distro", "sys"))
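The reset handler above serializes `SettingsRequest::Reset` and sends the bytes to kns-indexer, which expects an `IndexerRequest`, attaching the `{"root":true}` parameterized capability granted through the manifest hunks in this diff. A minimal sketch of why the cross-enum body works, using hypothetical stand-in enums (only the unit-variant name matters; the real types live in the two processes):

use serde::{Deserialize, Serialize};

// Hypothetical stand-ins for the real enums; only the shared variant name "Reset" matters.
#[derive(Serialize)]
enum SettingsRequest {
    Reset,
}

#[derive(Deserialize, Debug)]
enum IndexerRequest {
    Reset,
}

fn main() {
    // serde's default (externally tagged) enum encoding turns a unit variant into its bare name.
    let body = serde_json::to_vec(&SettingsRequest::Reset).unwrap();
    assert_eq!(body, b"\"Reset\"");

    // kns-indexer can therefore decode the settings process's bytes as its own Reset variant.
    let decoded: IndexerRequest = serde_json::from_slice(&body).unwrap();
    println!("{decoded:?}"); // Reset
}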
@@ -80,6 +80,11 @@ function App() {
     setTimeout(() => window.location.reload(), 1000);
   };
 
+  const handleReset = () => {
+    apiCall("Reset");
+    setTimeout(() => window.location.reload(), 1000);
+  };
+
   const handleSaveStylesheet = () => {
     const stylesheet = (document.getElementById('stylesheet-editor') as HTMLTextAreaElement).value;
     apiCall({ "SetStylesheet": stylesheet });
@@ -141,7 +146,20 @@ function App() {
             <p id="net-key">{appState.identity?.networking_key}</p>
             {appState.identity?.ws_routing && <p id="ip-ports">{appState.identity.ws_routing}</p>}
             {appState.identity?.routers && <p id="routers">{appState.identity.routers}</p>}
-            <button id="shutdown" onClick={handleShutdown}>shut down node(!)</button>
+            <div className="mt-16 flex flex-col justify-start">
+              <button
+                onClick={handleShutdown}
+                className="bg-red-500 hover:bg-red-600 text-white font-bold py-2 px-4 rounded w-full mb-8"
+              >
+                Shutdown Node
+              </button>
+              <button
+                onClick={handleReset}
+                className="bg-yellow-500 hover:bg-yellow-600 text-white font-bold py-2 px-4 rounded w-full"
+              >
+                Reset KNS State
+              </button>
+            </div>
           </article>
 
           <article id="pings">
2
kinode/packages/terminal/Cargo.lock
generated
@@ -1919,7 +1919,7 @@ dependencies = [
 [[package]]
 name = "kinode_process_lib"
 version = "0.10.0"
-source = "git+https://github.com/kinode-dao/process_lib?rev=0443ece#0443ece2a5dfdbdc1b40db454a1535e1b1c1a1b3"
+source = "git+https://github.com/kinode-dao/process_lib?rev=d97e012#d97e012842dd4cc0e036d5de5048064e770302ab"
 dependencies = [
  "alloy",
  "alloy-primitives",
@@ -8,7 +8,7 @@ simulation-mode = []
 
 [dependencies]
 anyhow = "1.0"
-kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
+kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 wit-bindgen = "0.36.0"
@@ -8,7 +8,7 @@ simulation-mode = []
 
 [dependencies]
 anyhow = "1.0"
-kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
+kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 wit-bindgen = "0.36.0"
@@ -7,7 +7,7 @@ edition = "2021"
 simulation-mode = []
 
 [dependencies]
-kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
+kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
 wit-bindgen = "0.36.0"
 
 [lib]
@@ -7,7 +7,7 @@ edition = "2021"
 simulation-mode = []
 
 [dependencies]
-kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
+kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
 wit-bindgen = "0.36.0"
 
 [lib]
@@ -7,7 +7,7 @@ edition = "2021"
 simulation-mode = []
 
 [dependencies]
-kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
+kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 wit-bindgen = "0.36.0"
@@ -8,7 +8,7 @@ simulation-mode = []
 
 [dependencies]
 anyhow = "1.0"
-kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
+kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
 rmp-serde = "1.1.2"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
@@ -8,7 +8,7 @@ simulation-mode = []
 
 [dependencies]
 anyhow = "1.0"
-kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
+kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 wit-bindgen = "0.36.0"
@@ -9,7 +9,7 @@ simulation-mode = []
 [dependencies]
 anyhow = "1.0"
 clap = "4.4"
-kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
+kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
 regex = "1.10.3"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
@@ -7,7 +7,7 @@ edition = "2021"
 simulation-mode = []
 
 [dependencies]
-kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
+kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
 rmp-serde = "1.1.2"
 serde = { version = "1.0", features = ["derive"] }
 wit-bindgen = "0.36.0"
@@ -7,7 +7,7 @@ edition = "2021"
 simulation-mode = []
 
 [dependencies]
-kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
+kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
 rmp-serde = "1.1.2"
 serde = { version = "1.0", features = ["derive"] }
 wit-bindgen = "0.36.0"
@@ -7,7 +7,7 @@ edition = "2021"
 simulation-mode = []
 
 [dependencies]
-kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
+kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
 rmp-serde = "1.1.2"
 serde = { version = "1.0", features = ["derive"] }
 wit-bindgen = "0.36.0"
@@ -20,6 +20,12 @@
         "http-client:distro:sys",
         "kernel:distro:sys",
         "kns-indexer:kns-indexer:sys",
+        {
+          "process": "kns-indexer:kns-indexer:sys",
+          "params": {
+            "root": true
+          }
+        },
         "kv:distro:sys",
         "net:distro:sys",
         "sqlite:distro:sys",
@@ -9,7 +9,7 @@ simulation-mode = []
 [dependencies]
 anyhow = "1.0"
 bincode = "1.3.3"
-kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
+kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
 rand = "0.8"
 regex = "1.10.3"
 serde = { version = "1.0", features = ["derive"] }
@@ -9,7 +9,7 @@ simulation-mode = []
 [dependencies]
 anyhow = "1.0"
 clap = "4.4"
-kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
+kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 wit-bindgen = "0.36.0"
2
kinode/packages/tester/Cargo.lock
generated
@@ -1751,7 +1751,7 @@ dependencies = [
 [[package]]
 name = "kinode_process_lib"
 version = "0.10.0"
-source = "git+https://github.com/kinode-dao/process_lib?rev=0443ece#0443ece2a5dfdbdc1b40db454a1535e1b1c1a1b3"
+source = "git+https://github.com/kinode-dao/process_lib?rev=d97e012#d97e012842dd4cc0e036d5de5048064e770302ab"
 dependencies = [
  "alloy",
  "alloy-primitives",
@@ -9,7 +9,7 @@ simulation-mode = []
 [dependencies]
 anyhow = "1.0"
 bincode = "1.3.3"
-kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "0443ece" }
+kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "d97e012" }
 process_macros = "0.1"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
@@ -156,6 +156,9 @@
     <script src="https://cdnjs.cloudflare.com/ajax/libs/argon2-browser/1.18.0/argon2-bundled.min.js"
         integrity="sha512-Alrh8vbmKDc5xiq7I/y8LTDwy9nw1nT9S/yR73HMMoWrpX4S1kizNPdWM896c/CDIGILNwAiaih627A94kRhYQ=="
         crossorigin="anonymous" referrerpolicy="no-referrer"></script>
+    <!-- REMOVE IN 1.0.0 -->
+    <script src="https://cdnjs.cloudflare.com/ajax/libs/crypto-js/4.2.0/crypto-js.min.js"></script>
+    <!--------------------->
     <script>
         let isInitialized = false;
 
@@ -207,6 +210,24 @@
                 if (result.status == 200) {
                     window.location.reload();
                 } else {
+
+                    // REMOVE IN 1.0.0
+                    const hashHex = '0x' + CryptoJS.SHA256(password).toString(CryptoJS.enc.Hex);
+                    const result = await fetch("/login", {
+                        method: "POST",
+                        headers: { "Content-Type": "application/json" },
+                        body: JSON.stringify({
+                            password_hash: hashHex,
+                            subdomain: isSecureSubdomain ? firstPathItem : '',
+                        }),
+                    });
+                    if (result.status == 200) {
+                        window.location.reload();
+                    } else {
+                        throw new Error("Login failed");
+                    }
+                    // END REMOVE IN 1.0.0
+
                     throw new Error("Login failed");
                 }
             }).catch(err => {
@@ -874,8 +874,20 @@ async fn login_with_password(
 
     let password_hash_hex = format!("0x{}", password_hash);
 
-    let k = keygen::decode_keyfile(&disk_keyfile, &password_hash_hex)
-        .expect("could not decode keyfile, password incorrect");
+    // SWITCH BACK TO THIS IN 1.0.0
+    // let k = keygen::decode_keyfile(&disk_keyfile, &password_hash_hex)
+    // .expect("could not decode keyfile, password incorrect");
+
+    // REMOVE IN 1.0.0
+    let k = match keygen::decode_keyfile(&disk_keyfile, &password_hash_hex) {
+        Ok(k) => k,
+        Err(_) => {
+            use sha2::{Digest, Sha256};
+            let password_hash = format!("0x{}", hex::encode(Sha256::digest(password)));
+            keygen::decode_keyfile(&disk_keyfile, &password_hash)
+                .expect("could not decode keyfile, password incorrect")
+        }
+    };
 
     let mut our = Identity {
         name: k.username.clone(),
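The fallback above accepts keyfiles created under the old login scheme, where the browser sent `"0x" + hex(SHA-256(password))` instead of the Argon2-derived hash. A minimal sketch of that legacy digest, assuming the same `sha2` and `hex` crates used in the hunk (the function name here is illustrative, not from the codebase):

use sha2::{Digest, Sha256};

/// Legacy (pre-1.0.0) password hash: "0x" plus lowercase hex of SHA-256(password).
/// This mirrors what the old pages compute with CryptoJS.SHA256 / viem's sha256.
fn legacy_password_hash(password: &str) -> String {
    format!("0x{}", hex::encode(Sha256::digest(password.as_bytes())))
}

fn main() {
    // Example only; any password produces a 66-character 0x-prefixed digest.
    let h = legacy_password_hash("correct horse battery staple");
    assert_eq!(h.len(), 2 + 64);
    println!("{h}");
}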
@@ -7,6 +7,8 @@ import {
 import { PageProps } from "../lib/types";
 import Loader from "../components/Loader";
 import { redirectToHomepage } from "../utils/redirect-to-homepage";
+// REMOVE IN 1.0.0
+import { sha256, toBytes } from "viem";
 
 interface ImportKeyfileProps extends PageProps { }
 
@@ -69,7 +71,28 @@ function ImportKeyfile({
       });
 
       if (result.status > 399) {
-        throw new Error(await result.text());
+
+        // REMOVE IN 1.0.0
+        let hashed_password = sha256(toBytes(pw));
+        const result = await fetch("/import-keyfile", {
+          method: "POST",
+          credentials: 'include',
+          headers: { "Content-Type": "application/json" },
+          body: JSON.stringify({
+            keyfile: Buffer.from(localKey).toString('utf8'),
+            password_hash: hashed_password,
+          }),
+        });
+
+        if (result.status > 399) {
+          throw new Error("Incorrect password");
+        } else {
+          redirectToHomepage();
+        }
+        // END REMOVE IN 1.0.0
+
+        // BRING BACK IN 1.0.0
+        // throw new Error(await result.text());
       }
       redirectToHomepage();
     }).catch(err => {
@@ -4,6 +4,8 @@ import Loader from "../components/Loader";
 import { useNavigate } from "react-router-dom";
 import { Tooltip } from "../components/Tooltip";
 import { redirectToHomepage } from "../utils/redirect-to-homepage";
+// REMOVE IN 1.0.0
+import { sha256, toBytes } from "viem";
 
 interface LoginProps extends PageProps { }
 
@@ -54,7 +56,27 @@ function Login({
       );
 
       if (result.status > 399) {
-        throw new Error(await result.text());
+
+        // REMOVE IN 1.0.0
+        let hashed_password = sha256(toBytes(pw));
+        const result = await fetch(
+          "/login",
+          {
+            method: "POST",
+            credentials: 'include',
+            headers: { "Content-Type": "application/json" },
+            body: JSON.stringify({ password_hash: hashed_password }),
+          }
+        );
+        if (result.status > 399) {
+          throw new Error(await result.text());
+        } else {
+          redirectToHomepage();
+        }
+        // END REMOVE IN 1.0.0
+
+        // BRING BACK IN 1.0.0
+        // throw new Error(await result.text());
       }
       redirectToHomepage();
     }).catch(err => {
@@ -83,10 +83,10 @@ impl SqliteState {
 
         fs::create_dir_all(&db_path).await?;
 
-        let db_file_path = format!("{}.db", db);
+        let db_file_path = db_path.join(format!("{}.db", db));
 
         let db_conn = Connection::open(db_file_path)?;
-        let _ = db_conn.execute("PRAGMA journal_mode=WAL", []);
+        let _: String = db_conn.query_row("PRAGMA journal_mode=WAL", [], |row| row.get(0))?;
 
         self.open_dbs.insert(key, Mutex::new(db_conn));
 
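Two fixes land in the hunk above: the database file is now created inside `db_path` rather than the working directory, and the WAL pragma is read back with `query_row` so its result (and any error) is no longer discarded. A minimal sketch of the second point, assuming `Connection` here is rusqlite's: `PRAGMA journal_mode=WAL` returns a one-row result containing the resulting mode, which `execute()` rejects as a row-producing statement, while `query_row` runs it and propagates failures through `?`.

// Sketch under the assumption that the sqlite backend is rusqlite.
use rusqlite::Connection;

fn open_db_with_wal(db_dir: &std::path::Path, db: &str) -> rusqlite::Result<Connection> {
    // Join the file name onto the directory, mirroring the fixed db_file_path above.
    let db_file_path = db_dir.join(format!("{}.db", db));
    let conn = Connection::open(db_file_path)?;

    // "PRAGMA journal_mode=WAL" yields one row with the resulting mode ("wal" for a
    // file-backed database), so read it with query_row; execute() would return
    // Error::ExecuteReturnedResults for a statement that produces rows.
    let mode: String = conn.query_row("PRAGMA journal_mode=WAL", [], |row| row.get(0))?;
    debug_assert_eq!(mode, "wal");

    Ok(conn)
}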
6
package-lock.json
generated
Normal file
@@ -0,0 +1,6 @@
+{
+  "name": "kinode",
+  "lockfileVersion": 2,
+  "requires": true,
+  "packages": {}
+}