Merge pull request #643 from kinode-dao/develop

Develop 0.10.0
This commit is contained in:
doria 2024-12-23 13:40:16 -05:00 committed by GitHub
commit da11125989
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
254 changed files with 35271 additions and 16800 deletions

3
.gitignore vendored
View File

@ -16,8 +16,9 @@ wit/
kinode/src/bootstrapped_processes.rs
kinode/packages/**/wasi_snapshot_preview1.wasm
kinode/packages/app_store/pkg/ui/*
kinode/packages/app-store/pkg/ui/*
kinode/packages/homepage/pkg/ui/*
kinode/packages/settings/pkg/ui/*
kinode/src/register-ui/build/
kinode/src/register-ui/dist/
kinode/packages/docs/pkg/ui

3153
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -1,7 +1,7 @@
[package]
name = "kinode_lib"
authors = ["KinodeDAO"]
version = "0.9.10"
authors = ["Sybil Technologies AG"]
version = "0.10.0"
edition = "2021"
description = "A general-purpose sovereign cloud computing platform"
homepage = "https://kinode.org"
@ -14,21 +14,20 @@ lib = { path = "lib" }
[workspace]
members = [
"lib", "kinode",
"kinode/packages/app_store/app_store", "kinode/packages/app_store/ft_worker",
"kinode/packages/app_store/download", "kinode/packages/app_store/install", "kinode/packages/app_store/uninstall", "kinode/packages/app_store/downloads", "kinode/packages/app_store/chain",
"kinode/packages/app-store/app-store", "kinode/packages/app-store/ft-worker",
"kinode/packages/app-store/download", "kinode/packages/app-store/install", "kinode/packages/app-store/uninstall", "kinode/packages/app-store/downloads", "kinode/packages/app-store/chain",
"kinode/packages/chess/chess",
"kinode/packages/contacts/contacts",
"kinode/packages/homepage/homepage",
"kinode/packages/kino_updates/blog", "kinode/packages/kino_updates/globe",
"kinode/packages/kns_indexer/kns_indexer", "kinode/packages/kns_indexer/get_block", "kinode/packages/kns_indexer/state",
"kinode/packages/settings/settings",
"kinode/packages/kns-indexer/kns-indexer", "kinode/packages/kns-indexer/get-block", "kinode/packages/settings/settings", "kinode/packages/kns-indexer/reset",
"kinode/packages/kns-indexer/node-info", "kinode/packages/kns-indexer/state",
"kinode/packages/terminal/terminal",
"kinode/packages/terminal/alias", "kinode/packages/terminal/cat", "kinode/packages/terminal/echo",
"kinode/packages/terminal/help", "kinode/packages/terminal/hi", "kinode/packages/terminal/kfetch",
"kinode/packages/terminal/kill", "kinode/packages/terminal/m", "kinode/packages/terminal/top",
"kinode/packages/terminal/net_diagnostics", "kinode/packages/terminal/peer", "kinode/packages/terminal/peers",
"kinode/packages/terminal/net-diagnostics", "kinode/packages/terminal/peer", "kinode/packages/terminal/peers",
"kinode/packages/tester/tester",
"scripts/build_packages",
"scripts/build-packages",
]
default-members = ["lib"]
resolver = "2"

View File

@ -16,9 +16,26 @@ FROM downloader_${TARGETARCH} AS downloader
FROM debian:12-slim
RUN apt-get update && apt-get install openssl -y
# Create a non-root user and group
RUN groupadd -r kinode && \
useradd -r -g kinode -d /kinode-home/home/kinode kinode
RUN apt-get update && \
apt-get install openssl -y && \
rm -rf /var/lib/apt/lists/*
# Create directory for kinode and set permissions
RUN mkdir -p /kinode-home/home/kinode && \
chown -R kinode:kinode /kinode-home
COPY --from=downloader /tmp/download/kinode /bin/kinode
RUN chown kinode:kinode /bin/kinode && \
chmod 755 /bin/kinode
# Switch to non-root user
USER kinode
WORKDIR /kinode-home
ENTRYPOINT [ "/bin/kinode" ]
CMD [ "/kinode-home" ]

View File

@ -38,7 +38,7 @@ cargo install cargo-wasi
# Install NPM so we can build frontends for "distro" packages.
# https://docs.npmjs.com/downloading-and-installing-node-js-and-npm
# If you want to skip this step, build the packages with `cargo run -p build_packages -- --skip-build-frontend` to neglect building the frontends
# If you want to skip this step, build the packages with `cargo run -p build-packages -- --skip-build-frontend` to neglect building the frontends
# Build the "distro" Wasm modules, then, build the runtime.
# The compiled packages will be at `kinode/target/packages.zip`.
@ -46,7 +46,8 @@ cargo install cargo-wasi
# OPTIONAL: --release flag (slower build; faster runtime; binary at `kinode/target/release/kinode`).
cd kinode
cargo run -p build_packages
cargo run -p build-packages
# OPTIONAL: --release flag
cargo build -p kinode
```
@ -54,15 +55,17 @@ cargo build -p kinode
## Security Status
No security audits of this crate have ever been performed. This software is under active development and should be **used at your own risk**.
This software is under active development and should be **used at your own risk**.
A security audit targeting the networking protocol, web interface, and kernel architecture was performed by [Enigma Dark](https://www.enigmadark.com/).
That report can be found [here](https://github.com/Enigma-Dark/security-review-reports/blob/main/2024-11-18_Architecture_Review_Report_Kinode.pdf).
## Boot
Make sure not to use the same home directory for two nodes at once! You can use any name for the home directory: here we just use `home`. The `--` here separates cargo arguments from binary arguments.
```bash
# OPTIONAL: --release flag
cargo +nightly run -p kinode -- home
cargo run -p kinode -- home
```
On boot you will be prompted to navigate to `localhost:8080` or whatever HTTP port your node bound to: it will try 8080 and go up from there, or use the port passed with the `--port` boot flag. Make sure your browser wallet matches the network that the node is being booted on. Follow the registration UI -- if you want to register a new ID you will either need Optimism ETH or an invite code.
@ -108,14 +111,14 @@ You can also do the same thing by using the `--rpc` boot flag with an Optimism W
The base OS install comes with certain runtime modules. These are interacted with in the same way as userspace processes, but are deeply ingrained to the system and the APIs they present at their Process IDs are assumed to be available by userspace processes. All of these are identified in the `distro:sys` package.
This distribution of the OS also comes with userspace packages pre-installed. Some of these packages are intimately tied to the runtime: `terminal`, `homepage`, and `kns_indexer`. Modifying, removing or replacing the distro userspace packages should only be done in highly specialized use-cases.
This distribution of the OS also comes with userspace packages pre-installed. Some of these packages are intimately tied to the runtime: `terminal`, `homepage`, and `kns-indexer`. Modifying, removing or replacing the distro userspace packages should only be done in highly specialized use-cases.
The runtime distro processes are:
- `eth:distro:sys`
- `fd_manager:distro:sys`
- `http_client:distro:sys`
- `http_server:distro:sys`
- `fd-manager:distro:sys`
- `http-client:distro:sys`
- `http-server:distro:sys`
- `kernel:distro:sys`
- `kv:distro:sys`
- `net:distro:sys`
@ -127,12 +130,11 @@ The runtime distro processes are:
The distro userspace packages are:
- `app_store:sys`
- `app-store:sys`
- `chess:sys`
- `contacts:sys`
- `homepage:sys`
- `kino_updates:sys`
- `kns_indexer:sys`
- `kns-indexer:sys`
- `settings:sys`
- `terminal:sys`
- `tester:sys` (used with `kit` for running test suites, only installed in `simulation-mode`)
@ -154,6 +156,8 @@ The `sys` publisher is not a real node ID, but it's also not a special case valu
- UpArrow/DownArrow or CTRL+P/CTRL+N to move up and down through command history
- CTRL+R to search history, CTRL+R again to toggle through search results, CTRL+G to cancel search
- CTRL+W to set process-level verbosities that override the verbosity mode set with CTRL+V (0-3, 0 is default and lowest verbosity)
### Built-in terminal scripts
The terminal package contains a number of built-in scripts.
@ -167,7 +171,7 @@ Subsequent use of the shorthand will then be interpolated as the process ID.
A list of the terminal scripts included in this distro:
- `alias <shorthand> <process_id>`: create an alias for a script.
- Example: `alias get_block get_block:kns_indexer:sys`
- Example: `alias get_block get-block:kns-indexer:sys`
- note: all of these listed commands are just default aliases for terminal scripts.
- `cat <vfs-file-path>`: print the contents of a file in the terminal.
- Example: `cat /terminal:sys/pkg/scripts.json`
@ -183,7 +187,7 @@ A list of the terminal scripts included in this distro:
- Example: `m our@eth:distro:sys "SetPublic" -a 5`
- the '-a' flag is used to expect a response with a given timeout
- `our` will always be interpolated by the system as your node's name
- `net_diagnostics`: print some useful networking diagnostic data.
- `net-diagnostics`: print some useful networking diagnostic data.
- `peer <name>`: print the peer's PKI info, if it exists.
- `peers`: print the peers the node currently hold connections with.
- `top <process_id>`: display kernel debugging info about a process. Leave the process ID blank to display info about all processes and get the total number of running processes.
@ -202,20 +206,32 @@ To build a local Docker image, run the following command in this project root.
```bash
# The `VERSION` may be replaced with the tag of a GitHub release
export VERSION=0.9.8
# Build for your system's architecture
docker build . -t 0xlynett/kinode --build-arg VERSION=v0.9.1
docker build . -t kinode-${VERSION} --build-arg VERSION=v${VERSION} --platform linux/amd64
# Build a multiarch image
docker buildx build . --platform arm64,amd64 --build-arg VERSION=v0.9.1 -t 0xlynett/kinode
docker buildx build . -t kinode-${VERSION} --build-arg VERSION=v${VERSION} --platform arm64,amd64
```
For example:
To run, for example for a node named `helloworld.os`:
```bash
docker volume create kinode-volume
export NODENAME=helloworld.os
docker run -d -p 8080:8080 -it --name my-kinode \
--mount type=volume,source=kinode-volume,destination=/kinode-home \
0xlynett/kinode
docker volume create kinode-${NODENAME}
docker run -p 8080:8080 --rm -it --name kinode-${NODENAME} --mount type=volume,source=kinode-${NODENAME},destination=/kinode-home kinode-${VERSION}
```
which will launch your Kinode container attached to the terminal.
Alternatively you can run it detached:
```
docker run -p 8080:8080 --rm -dt --name kinode-${NODENAME} --mount type=volume,source=kinode-${NODENAME},destination=/kinode-home kinode-${VERSION}
```
Note that the `-t` flag *must* be passed.
If it is not passed, you must pass the `--detached` argument to the Kinode binary, i.e.
```
docker run -p 8080:8080 --rm -d --name kinode-${NODENAME} --mount type=volume,source=kinode-${NODENAME},destination=/kinode-home kinode-${VERSION} /kinode-home --detached
```

View File

@ -1,3 +1,5 @@
@import url('https://fonts.googleapis.com/css2?family=Kode+Mono:wght@700&display=swap');
/* CSS Reset and Base Styles */
*,
*::before,
@ -23,8 +25,6 @@ select {
font-family: var(--font-family-main);
}
@import url('https://fonts.googleapis.com/css2?family=Kode+Mono:wght@700&display=swap');
/* Variables */
:root {
color-scheme: light dark;

View File

@ -1,7 +1,7 @@
[package]
name = "kinode"
authors = ["KinodeDAO"]
version = "0.9.10"
authors = ["Sybil Technologies AG"]
version = "0.10.0"
edition = "2021"
description = "A general-purpose sovereign cloud computing platform"
homepage = "https://kinode.org"
@ -21,7 +21,7 @@ simulation-mode = []
[dependencies]
aes-gcm = "0.10.3"
alloy = { git = "https://github.com/kinode-dao/alloy.git", rev = "e672f3e", features = [
alloy = { version = "0.8.1", features = [
"consensus",
"contract",
"json-rpc",
@ -37,10 +37,11 @@ alloy = { git = "https://github.com/kinode-dao/alloy.git", rev = "e672f3e", feat
"signers",
"signer-local",
] }
alloy-primitives = "0.7.6"
alloy-sol-macro = "0.7.6"
alloy-sol-types = "0.7.6"
alloy-primitives = "0.8.15"
alloy-sol-macro = "0.8.15"
alloy-sol-types = "0.8.15"
anyhow = "1.0.71"
argon2 = "0.5.3"
async-trait = "0.1.71"
base64 = "0.22.0"
bincode = "1.3.3"
@ -72,20 +73,17 @@ rusqlite = { version = "0.31.0", features = ["bundled"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
sha2 = "0.10.8"
sha3 = "0.10.8"
# snow = { version = "0.9.5", features = ["ring-resolver"] }
# unfortunately need to use forked version for async use and in-place encryption
snow = { git = "https://github.com/dr-frmr/snow", branch = "dr/extract_cipherstates", features = ["ring-resolver"] }
socket2 = "0.5.7"
static_dir = "0.2.0"
thiserror = "1.0"
tokio = { version = "1.28", features = ["fs", "macros", "rt-multi-thread", "signal", "sync"] }
tokio = { version = "1.28", features = ["fs", "macros", "rt-multi-thread", "signal", "sync", "time"] }
tokio-tungstenite = { version = "0.21.0", features = ["native-tls"] }
unicode-segmentation = "1.11"
unicode-width = "0.1.13"
url = "2.4.1"
warp = "0.3.5"
wasi-common = "19.0.1"
wasmtime = "19.0.1"
wasmtime-wasi = "19.0.1"
wasmtime = "27.0.0"
wasmtime-wasi = "27.0.0"
zip = "1.1.1"

View File

@ -30,14 +30,36 @@ fn compute_hash(file_path: &Path) -> anyhow::Result<String> {
fn main() -> anyhow::Result<()> {
let path_to_packages_zip = match std::env::var("PATH_TO_PACKAGES_ZIP") {
Ok(env_var) => env_var,
Ok(env_var) => {
let path = PathBuf::from(&env_var);
if !path.exists() {
let path = std::env::current_dir()?;
let Some(path) = path.parent() else {
return Err(anyhow::anyhow!(
"Given path to packages {env_var} not found (cwd: {:?})",
std::env::current_dir()
));
};
let path = path.join(&env_var);
if path.exists() {
path.display().to_string()
} else {
return Err(anyhow::anyhow!(
"Given path to packages {env_var} not found in parent of cwd: {:?}",
std::env::current_dir()
));
}
} else {
env_var
}
}
Err(_) => {
let canonical_path = PathBuf::from(CANONICAL_PACKAGES_ZIP_PATH);
if canonical_path.exists() {
p!("No path given via PATH_TO_PACKAGES_ZIP envvar. Defaulting to path of `kinode/target/packages.zip`.");
CANONICAL_PACKAGES_ZIP_PATH.to_string()
} else {
return Err(anyhow::anyhow!("You must build packages.zip with scripts/build_packages or set PATH_TO_PACKAGES_ZIP to point to your desired pacakges.zip (default path at kinode/target/packages.zip was not populated)."));
return Err(anyhow::anyhow!("You must build packages.zip with scripts/build-packages or set PATH_TO_PACKAGES_ZIP to point to your desired pacakges.zip (default path at kinode/target/packages.zip was not populated)."));
}
}
};
@ -49,6 +71,10 @@ fn main() -> anyhow::Result<()> {
}
let path_to_packages_zip_path = PathBuf::from(&path_to_packages_zip).canonicalize()?;
let canonical_packages_zip_path = PathBuf::from(CANONICAL_PACKAGES_ZIP_PATH);
if !canonical_packages_zip_path.exists() {
std::fs::File::create(&canonical_packages_zip_path)?;
}
let canonical_packages_zip_path = PathBuf::from(CANONICAL_PACKAGES_ZIP_PATH).canonicalize()?;
if path_to_packages_zip_path != canonical_packages_zip_path {
std::fs::copy(&path_to_packages_zip_path, &canonical_packages_zip_path)?;

View File

@ -1,11 +1,11 @@
[workspace]
resolver = "2"
members = [
"app_store",
"app-store",
"chain",
"download",
"downloads",
"ft_worker",
"ft-worker",
"install",
"uninstall",
]

View File

@ -1,7 +1,7 @@
/// The App Store manages the discovery, download, and installation of packages (apps)
/// The App Store manages the discovery, download, and installation of packages (apps)
/// in the Kinode ecosystem. It consists of three main processes: main, chain, and downloads.
/// main:app_store:sys
/// main:app-store:sys
interface main {
use standard.{package-id};
use chain.{onchain-metadata, chain-error};
@ -22,25 +22,41 @@ interface main {
/// Local requests that can be made to the App Store
variant local-request {
/// Request to add a new package to app_store. Includes zip and an optional onchain-metadata.
/// This is used by kit start
/// Request to add a new package to app-store.
///
/// Used by kit start-package.
///
/// lazy-load-blob: required; the zipped package to be added.
new-package(new-package-request),
/// Request to install a package
/// Request to install a package.
///
/// lazy-load-blob: none.
install(install-package-request),
/// Request to uninstall a package
/// Request to uninstall a package.
///
/// lazy-load-blob: none.
uninstall(package-id),
/// Request to list all available APIs
///
/// lazy-load-blob: none.
apis,
/// Request to get a specific API
///
/// lazy-load-blob: none.
get-api(package-id),
}
/// Local responses from the App Store
variant local-response {
/// lazy-load-blob: none.
new-package-response(new-package-response),
/// lazy-load-blob: none.
install-response(install-response),
/// lazy-load-blob: none.
uninstall-response(uninstall-response),
/// lazy-load-blob: none.
apis-response(apis-response),
/// lazy-load-blob: on success; the WIT API that was requested.
get-api-response(get-api-response),
}
@ -90,33 +106,55 @@ interface main {
}
}
/// chain:app_store:sys
/// chain:app-store:sys
/// This process holds information about on-chain ~uri and ~hash notes,
/// and fetches the metadata with the http_client:distro:sys when necessary.
/// and fetches the metadata with the http-client:distro:sys when necessary.
interface chain {
use standard.{package-id};
/// Requests that can be made to the chain component
variant chain-requests {
/// Get information about a specific app
///
/// lazy-load-blob: none.
get-app(package-id),
/// Get information about all apps
///
/// lazy-load-blob: none.
get-apps,
/// Get information about apps published by the current node
///
/// lazy-load-blob: none.
get-our-apps,
/// Start auto-updating an app
///
/// lazy-load-blob: none.
start-auto-update(package-id),
/// Stop auto-updating an app
///
/// lazy-load-blob: none.
stop-auto-update(package-id),
/// Reset app-store db
///
/// lazy-load-blob: none.
reset,
}
/// Responses from the chain component
variant chain-responses {
/// lazy-load-blob: none.
get-app(option<onchain-app>),
/// lazy-load-blob: none.
get-apps(list<onchain-app>),
/// lazy-load-blob: none.
get-our-apps(list<onchain-app>),
/// lazy-load-blob: none.
auto-update-started,
/// lazy-load-blob: none.
auto-update-stopped,
/// lazy-load-blob: none.
/// successful reset
reset-ok,
err(chain-error),
}
@ -159,11 +197,11 @@ interface chain {
}
}
/// downloads:app_store:sys
/// This process is responsible for downloading app packages from remote mirrors,
/// spawning ft_workers, and serves the files to the main:app_store:sys process.
/// downloads:app-store:sys
/// This process is responsible for downloading app packages from remote mirrors,
/// spawning ft_workers, and serves the files to the main:app-store:sys process.
/// It also manages the local storage of downloaded app zip packages.
///
///
interface downloads {
use standard.{package-id};
use chain.{onchain-metadata};
@ -171,37 +209,66 @@ interface downloads {
/// Requests that can be made to the downloads component
variant download-requests {
/// Request a remote download
///
/// lazy-load-blob: none.
remote-download(remote-download-request),
/// Request a chunk of a file
///
/// lazy-load-blob: none.
chunk(chunk-request),
/// Update download progress
///
/// lazy-load-blob: none.
progress(progress-update),
/// Update file size information
///
/// lazy-load-blob: none.
size(size-update),
/// Request a local download
///
/// lazy-load-blob: none.
local-download(local-download-request),
/// Request an auto-update
///
/// lazy-load-blob: none.
auto-update(auto-update-request),
/// Notify that a download is complete
///
/// lazy-load-blob: none.
download-complete(download-complete-request),
/// Auto-update-download complete
///
/// lazy-load-blob: none.
auto-download-complete(auto-download-complete-request),
/// Get files for a package
///
/// lazy-load-blob: none.
get-files(option<package-id>),
/// Remove a file
///
/// lazy-load-blob: none.
remove-file(remove-file-request),
/// Add a download
///
/// lazy-load-blob: none.
add-download(add-download-request),
/// Start mirroring a package
///
/// lazy-load-blob: none.
start-mirroring(package-id),
/// Stop mirroring a package
///
/// lazy-load-blob: none.
stop-mirroring(package-id),
}
/// Responses from the downloads component
variant download-responses {
/// lazy-load-blob: none.
success,
/// lazy-load-blob: none.
err(download-error),
/// lazy-load-blob: none.
get-files(list<entry>),
}
@ -236,6 +303,9 @@ interface downloads {
blob-not-found,
vfs-error,
handling-error(string),
timeout,
invalid-manifest,
offline,
}
/// Notification that a download is complete
@ -245,12 +315,26 @@ interface downloads {
err: option<download-error>,
}
/// Request for an auto-download complete
record auto-download-complete-request {
download-info: download-complete-request,
/// Variant for an auto-download complete
variant auto-download-complete-request {
success(auto-download-success),
err(auto-download-error),
}
/// Auto-download success
record auto-download-success {
package-id: package-id,
version-hash: string,
manifest-hash: string,
}
/// Auto-download error
record auto-download-error {
package-id: package-id,
version-hash: string,
tries: list<tuple<string, download-error>>, // (mirror, error)
}
/// Represents a hash mismatch error
record hash-mismatch {
desired: string,
@ -317,5 +401,5 @@ world app-store-sys-v1 {
import main;
import downloads;
import chain;
include process-v0;
}
include process-v1;
}

View File

@ -1,5 +1,5 @@
[package]
name = "app_store"
name = "app-store"
version = "0.3.1"
edition = "2021"
@ -7,12 +7,12 @@ edition = "2021"
simulation-mode = []
[dependencies]
alloy-primitives = "0.7.6"
alloy-sol-types = "0.7.6"
alloy-primitives = "0.8.15"
alloy-sol-types = "0.8.15"
anyhow = "1.0"
bincode = "1.3.3"
kinode_process_lib = "0.9.4"
process_macros = { git = "https://github.com/kinode-dao/process_macros", rev = "626e501" }
kinode_process_lib = "0.10.0"
process_macros = "0.1"
rand = "0.8"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
@ -20,7 +20,7 @@ sha2 = "0.10.8"
sha3 = "0.10.8"
url = "2.4.1"
urlencoding = "2.1.0"
wit-bindgen = "0.24.0"
wit-bindgen = "0.36.0"
zip = { version = "1.1.1", default-features = false }
[lib]

View File

@ -1,13 +1,15 @@
//! http_api for main:app_store:sys
//! http_api for main:app-store:sys
//! handles http_requests coming in, sending them to relevant processes (main/downloads/chain),
//! and sends back http_responses.
//!
use crate::{
kinode::process::chain::{ChainRequests, ChainResponses},
kinode::process::downloads::{
DownloadRequests, DownloadResponses, Entry, LocalDownloadRequest, RemoveFileRequest,
kinode::process::{
chain::{ChainRequests, ChainResponses},
downloads::{
DownloadRequests, DownloadResponses, Entry, LocalDownloadRequest, RemoveFileRequest,
},
},
state::{MirrorCheck, PackageState, State},
state::{MirrorCheck, PackageState, State, Updates},
};
use kinode_process_lib::{
http::{self, server, Method, StatusCode},
@ -28,6 +30,7 @@ pub fn init_frontend(our: &Address, http_server: &mut server::HttpServer) {
"/downloads", // all downloads
"/installed", // all installed apps
"/ourapps", // all apps we've published
"/updates", // all auto_updates
"/apps/:id", // detail about an on-chain app
"/downloads/:id", // local downloads for an app
"/installed/:id", // detail about an installed app
@ -37,7 +40,9 @@ pub fn init_frontend(our: &Address, http_server: &mut server::HttpServer) {
"/apps/:id/install", // install a downloaded app
"/downloads/:id/mirror", // start mirroring a version of a downloaded app
"/downloads/:id/remove", // remove a downloaded app
"/reset", // reset chain state, re-index
"/apps/:id/auto-update", // set auto-updating a version of a downloaded app
"/updates/:id/clear", // clear update info for an app.
"/mirrorcheck/:node", // check if a node/mirror is online/offline
] {
http_server
@ -143,7 +148,7 @@ fn make_widget() -> String {
<script>
document.addEventListener('DOMContentLoaded', function() {
function fetchApps() {
fetch('/main:app_store:sys/apps-public', { credentials: 'include' })
fetch('/main:app-store:sys/apps-public', { credentials: 'include' })
.then(response => response.json())
.then(data => {
const container = document.getElementById('latest-apps');
@ -152,7 +157,7 @@ fn make_widget() -> String {
if (app.metadata) {
const a = document.createElement('a');
a.className = 'app';
a.href = `/main:app_store:sys/app/${app.package_id.package_name}:${app.package_id.publisher_node}`
a.href = `/main:app-store:sys/app/${app.package_id.package_name}:${app.package_id.publisher_node}`
a.target = '_blank';
a.rel = 'noopener noreferrer';
const iconLetter = app.metadata_hash.replace('0x', '')[0].toUpperCase();
@ -207,9 +212,10 @@ fn make_widget() -> String {
pub fn handle_http_request(
our: &Address,
state: &mut State,
updates: &mut Updates,
req: &server::IncomingHttpRequest,
) -> (server::HttpResponse, Option<LazyLoadBlob>) {
match serve_paths(our, state, req) {
match serve_paths(our, state, updates, req) {
Ok((status_code, _headers, body)) => (
server::HttpResponse::new(status_code).header("Content-Type", "application/json"),
Some(LazyLoadBlob {
@ -248,13 +254,13 @@ fn gen_package_info(id: &PackageId, state: &PackageState) -> serde_json::Value {
"our_version_hash": state.our_version_hash,
"verified": state.verified,
"caps_approved": state.caps_approved,
"pending_update_hash": state.pending_update_hash,
})
}
fn serve_paths(
our: &Address,
state: &mut State,
updates: &mut Updates,
req: &server::IncomingHttpRequest,
) -> anyhow::Result<(http::StatusCode, Option<HashMap<String, String>>, Vec<u8>)> {
let method = req.method()?;
@ -266,7 +272,7 @@ fn serve_paths(
match bound_path {
// GET all apps
"/apps" | "/apps-public" => {
let resp = Request::to(("our", "chain", "app_store", "sys"))
let resp = Request::to(("our", "chain", "app-store", "sys"))
.body(serde_json::to_vec(&ChainRequests::GetApps)?)
.send_and_await_response(5)??;
let msg = serde_json::from_slice::<ChainResponses>(resp.body())?;
@ -292,7 +298,7 @@ fn serve_paths(
Method::GET => {
let package_id =
crate::kinode::process::main::PackageId::from_process_lib(package_id);
let resp = Request::to(("our", "chain", "app_store", "sys"))
let resp = Request::to(("our", "chain", "app-store", "sys"))
.body(serde_json::to_vec(&ChainRequests::GetApp(package_id))?)
.send_and_await_response(5)??;
let msg = serde_json::from_slice::<ChainResponses>(resp.body())?;
@ -305,7 +311,7 @@ fn serve_paths(
}
Method::DELETE => {
// uninstall an app
crate::utils::uninstall(state, &package_id)?;
crate::utils::uninstall(our, state, &package_id)?;
println!("successfully uninstalled {:?}", package_id);
Ok((
StatusCode::NO_CONTENT,
@ -322,7 +328,7 @@ fn serve_paths(
}
"/downloads" => {
// get all local downloads!
let resp = Request::to(("our", "downloads", "app_store", "sys"))
let resp = Request::to(("our", "downloads", "app-store", "sys"))
.body(serde_json::to_vec(&DownloadRequests::GetFiles(None))?)
.send_and_await_response(5)??;
@ -348,7 +354,7 @@ fn serve_paths(
));
};
let package_id = crate::kinode::process::main::PackageId::from_process_lib(package_id);
let resp = Request::to(("our", "downloads", "app_store", "sys"))
let resp = Request::to(("our", "downloads", "app-store", "sys"))
.body(serde_json::to_vec(&DownloadRequests::GetFiles(Some(
package_id,
)))?)
@ -387,7 +393,7 @@ fn serve_paths(
let package_id = crate::kinode::process::main::PackageId::from_process_lib(package_id);
// get the file corresponding to the version hash, extract manifest and return.
let resp = Request::to(("our", "downloads", "app_store", "sys"))
let resp = Request::to(("our", "downloads", "app-store", "sys"))
.body(serde_json::to_vec(&DownloadRequests::GetFiles(Some(
package_id.clone(),
)))?)
@ -466,7 +472,7 @@ fn serve_paths(
));
}
"/ourapps" => {
let resp = Request::to(("our", "chain", "app_store", "sys"))
let resp = Request::to(("our", "chain", "app-store", "sys"))
.body(serde_json::to_vec(&ChainRequests::GetOurApps)?)
.send_and_await_response(5)??;
let msg = serde_json::from_slice::<ChainResponses>(resp.body())?;
@ -509,7 +515,7 @@ fn serve_paths(
desired_version_hash: version_hash,
});
Request::to(("our", "downloads", "app_store", "sys"))
Request::to(("our", "downloads", "app-store", "sys"))
.body(serde_json::to_vec(&download_request)?)
.send()?;
Ok((
@ -533,7 +539,6 @@ fn serve_paths(
.ok_or(anyhow::anyhow!("missing blob"))?
.bytes;
let body_json: serde_json::Value = serde_json::from_slice(&body).unwrap_or_default();
let version_hash = body_json
.get("version_hash")
.and_then(|v| v.as_str())
@ -574,7 +579,7 @@ fn serve_paths(
format!("Missing id").into_bytes(),
));
};
let downloads = Address::from_str("our@downloads:app_store:sys")?;
let downloads = Address::from_str("our@downloads:app-store:sys")?;
match method {
// start mirroring an app
@ -647,7 +652,7 @@ fn serve_paths(
version_hash: version_hash,
});
let resp = Request::to(("our", "downloads", "app_store", "sys"))
let resp = Request::to(("our", "downloads", "app-store", "sys"))
.body(serde_json::to_vec(&download_request)?)
.send_and_await_response(5)??;
let msg = serde_json::from_slice::<DownloadResponses>(resp.body())?;
@ -681,7 +686,7 @@ fn serve_paths(
}
};
let resp = Request::to(("our", "chain", "app_store", "sys"))
let resp = Request::to(("our", "chain", "app-store", "sys"))
.body(serde_json::to_vec(&chain_request)?)
.send_and_await_response(5)??;
@ -697,6 +702,53 @@ fn serve_paths(
)),
}
}
// GET all failed/pending auto_updates
"/updates" => {
let serialized = serde_json::to_vec(&updates).unwrap_or_default();
return Ok((StatusCode::OK, None, serialized));
}
// POST clear all failed/pending auto_updates for a package_id
"/updates/:id/clear" => {
let Ok(package_id) = get_package_id(url_params) else {
return Ok((
StatusCode::BAD_REQUEST,
None,
format!("Missing package_id").into_bytes(),
));
};
if method != Method::POST {
return Ok((
StatusCode::METHOD_NOT_ALLOWED,
None,
format!("Invalid method {method} for {bound_path}").into_bytes(),
));
}
let _ = updates.package_updates.remove(&package_id);
updates.save();
Ok((StatusCode::OK, None, vec![]))
}
// POST reset chain state, re-index
"/reset" => {
if method != Method::POST {
return Ok((
StatusCode::METHOD_NOT_ALLOWED,
None,
format!("Invalid method {method} for {bound_path}").into_bytes(),
));
}
let chain = Address::from_str("our@chain:app-store:sys")?;
let resp = Request::new()
.target(chain)
.body(&ChainRequests::Reset)
.send_and_await_response(5)??;
let msg = serde_json::from_slice::<ChainResponses>(resp.body())?;
if let ChainResponses::ResetOk = msg {
Ok((StatusCode::OK, None, vec![]))
} else {
Ok((StatusCode::INTERNAL_SERVER_ERROR, None, vec![]))
}
}
// GET online/offline mirrors for a listed app
"/mirrorcheck/:node" => {
if method != Method::GET {

View File

@ -1,5 +1,5 @@
#![feature(let_chains)]
//! main:app_store:sys
//! main:app-store:sys
//!
//! This process serves as the primary interface for the App Store system in the Kinode ecosystem.
//! It coordinates between the http user interface, the chain process, and the downloads process.
@ -42,7 +42,7 @@ use kinode_process_lib::{
LazyLoadBlob, Message, PackageId, Response,
};
use serde::{Deserialize, Serialize};
use state::State;
use state::{State, UpdateInfo, Updates};
wit_bindgen::generate!({
path: "target/wit",
@ -78,20 +78,22 @@ pub enum Resp {
call_init!(init);
fn init(our: Address) {
println!("started");
let mut http_server = http::server::HttpServer::new(5);
http_api::init_frontend(&our, &mut http_server);
// state = state built from the filesystem, installed packages
// updates = state saved with get/set_state(), auto_update metadata.
let mut state = State::load().expect("state loading failed");
let mut updates = Updates::load();
loop {
match await_message() {
Err(send_error) => {
print_to_terminal(1, &format!("main: got network error: {send_error}"));
}
Ok(message) => {
if let Err(e) = handle_message(&our, &mut state, &mut http_server, &message) {
if let Err(e) =
handle_message(&our, &mut state, &mut updates, &mut http_server, &message)
{
let error_message = format!("error handling message: {e:?}");
print_to_terminal(1, &error_message);
Response::new()
@ -111,6 +113,7 @@ fn init(our: Address) {
fn handle_message(
our: &Address,
state: &mut State,
updates: &mut Updates,
http_server: &mut http::server::HttpServer,
message: &Message,
) -> anyhow::Result<()> {
@ -129,12 +132,12 @@ fn handle_message(
}
}
Req::Http(server_request) => {
if !message.is_local(&our) || message.source().process != "http_server:distro:sys" {
return Err(anyhow::anyhow!("http_server from non-local node"));
if !message.is_local(&our) || message.source().process != "http-server:distro:sys" {
return Err(anyhow::anyhow!("http-server from non-local node"));
}
http_server.handle_request(
server_request,
|incoming| http_api::handle_http_request(our, state, &incoming),
|incoming| http_api::handle_http_request(our, state, updates, &incoming),
|_channel_id, _message_type, _blob| {
// not expecting any websocket messages from FE currently
},
@ -142,7 +145,7 @@ fn handle_message(
}
Req::Progress(progress) => {
if !message.is_local(&our) {
return Err(anyhow::anyhow!("http_server from non-local node"));
return Err(anyhow::anyhow!("http-server from non-local node"));
}
http_server.ws_push_all_channels(
"/",
@ -168,40 +171,80 @@ fn handle_message(
"auto download complete from non-local node"
));
}
// auto_install case:
// the downloads process has given us the new package manifest's
// capability hashes, and the old package's capability hashes.
// we can use these to determine if the new package has the same
// capabilities as the old one, and if so, auto-install it.
let manifest_hash = req.manifest_hash;
let package_id = req.download_info.package_id;
let version_hash = req.download_info.version_hash;
match req {
AutoDownloadCompleteRequest::Success(succ) => {
// auto_install case:
// the downloads process has given us the new package manifest's
// capability hashes, and the old package's capability hashes.
// we can use these to determine if the new package has the same
// capabilities as the old one, and if so, auto-install it.
let manifest_hash = succ.manifest_hash;
let package_id = succ.package_id;
let version_hash = succ.version_hash;
let process_lib_package_id = package_id.clone().to_process_lib();
let process_lib_package_id = package_id.clone().to_process_lib();
// first, check if we have the package and get its manifest hash
let should_auto_install = state
.packages
.get(&process_lib_package_id)
.map(|package| package.manifest_hash == Some(manifest_hash.clone()))
.unwrap_or(false);
// first, check if we have the package and get its manifest hash
let should_auto_install = state
.packages
.get(&process_lib_package_id)
.map(|package| package.manifest_hash == Some(manifest_hash.clone()))
.unwrap_or(false);
if should_auto_install {
if let Err(e) =
utils::install(&package_id, None, &version_hash, state, &our.node)
{
if let Some(package) = state.packages.get_mut(&process_lib_package_id) {
package.pending_update_hash = Some(version_hash);
if should_auto_install {
if let Err(e) =
utils::install(&package_id, None, &version_hash, state, &our.node)
{
println!("error auto-installing package: {e}");
// Get or create the outer map for this package
updates
.package_updates
.entry(package_id.to_process_lib())
.or_default()
.insert(
version_hash.clone(),
UpdateInfo {
errors: vec![],
pending_manifest_hash: Some(manifest_hash.clone()),
},
);
updates.save();
} else {
println!(
"auto-installed update for package: {process_lib_package_id}"
);
}
} else {
// TODO.
updates
.package_updates
.entry(package_id.to_process_lib())
.or_default()
.insert(
version_hash.clone(),
UpdateInfo {
errors: vec![],
pending_manifest_hash: Some(manifest_hash.clone()),
},
);
updates.save();
}
println!("error auto-installing package: {e}");
} else {
println!("auto-installed update for package: {process_lib_package_id}");
}
} else {
if let Some(package) = state.packages.get_mut(&process_lib_package_id) {
package.pending_update_hash = Some(version_hash);
println!("error auto-installing package: manifest hash mismatch");
AutoDownloadCompleteRequest::Err(err) => {
println!("error auto-downloading package: {err:?}");
updates
.package_updates
.entry(err.package_id.to_process_lib())
.or_default()
.insert(
err.version_hash.clone(),
UpdateInfo {
errors: err.tries,
pending_manifest_hash: None,
},
);
updates.save();
}
}
}
@ -283,7 +326,7 @@ fn handle_local_request(
None,
),
LocalRequest::Uninstall(package_id) => (
match utils::uninstall(state, &package_id.clone().to_process_lib()) {
match utils::uninstall(our, state, &package_id.clone().to_process_lib()) {
Ok(()) => {
println!(
"successfully uninstalled package: {:?}",

View File

@ -1,10 +1,10 @@
use crate::{utils, VFS_TIMEOUT};
use kinode_process_lib::{kimap, vfs, PackageId};
use crate::{kinode::process::downloads::DownloadError, utils, VFS_TIMEOUT};
use kinode_process_lib::{get_state, kimap, set_state, vfs, PackageId};
use serde::{Deserialize, Serialize};
use std::collections::{HashMap, HashSet};
//
// main:app_store types
// main:app-store types
//
#[derive(Debug, Serialize, Deserialize)]
@ -54,9 +54,6 @@ pub struct PackageState {
/// capabilities have changed. if they have changed, auto-install must fail
/// and the user must approve the new capabilities.
pub manifest_hash: Option<String>,
/// stores the version hash of a failed auto-install attempt, which can be
/// later installed by the user by approving new caps.
pub pending_update_hash: Option<String>,
}
// this seems cleaner to me right now with pending_update_hash, but given how we serialize
@ -133,7 +130,6 @@ impl State {
verified: true, // implicitly verified (TODO re-evaluate)
caps_approved: false, // must re-approve if you want to do something ??
manifest_hash: Some(manifest_hash),
pending_update_hash: None, // ... this could be a separate state saved. don't want to reflect this info on-disk as a file.
},
);
@ -147,3 +143,76 @@ impl State {
Ok(())
}
}
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(transparent)]
pub struct Updates {
#[serde(with = "package_id_map")]
pub package_updates: HashMap<PackageId, HashMap<String, UpdateInfo>>, // package id -> version_hash -> update info
}
impl Default for Updates {
fn default() -> Self {
Self {
package_updates: HashMap::new(),
}
}
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct UpdateInfo {
pub errors: Vec<(String, DownloadError)>, // errors collected by downloads process
pub pending_manifest_hash: Option<String>, // pending manifest hash that differed from the installed one
}
impl Updates {
pub fn load() -> Self {
let bytes = get_state();
if let Some(bytes) = bytes {
serde_json::from_slice(&bytes).unwrap_or_default()
} else {
Self::default()
}
}
pub fn save(&self) {
let bytes = serde_json::to_vec(self).unwrap_or_default();
set_state(&bytes);
}
}
// note: serde_json doesn't support non-string keys when serializing maps, so
// we have to use a custom simple serializer.
mod package_id_map {
use super::*;
use std::{collections::HashMap, str::FromStr};
pub fn serialize<S>(
map: &HashMap<PackageId, HashMap<String, UpdateInfo>>,
s: S,
) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
use serde::ser::SerializeMap;
let mut map_ser = s.serialize_map(Some(map.len()))?;
for (k, v) in map {
map_ser.serialize_entry(&k.to_string(), v)?;
}
map_ser.end()
}
pub fn deserialize<'de, D>(
d: D,
) -> Result<HashMap<PackageId, HashMap<String, UpdateInfo>>, D::Error>
where
D: serde::Deserializer<'de>,
{
let string_map = HashMap::<String, HashMap<String, UpdateInfo>>::deserialize(d)?;
Ok(string_map
.into_iter()
.filter_map(|(k, v)| PackageId::from_str(&k).ok().map(|pid| (pid, v)))
.collect())
}
}

View File

@ -8,8 +8,8 @@ use {
VFS_TIMEOUT,
},
kinode_process_lib::{
get_blob, kernel_types as kt, println, vfs, Address, LazyLoadBlob, PackageId, ProcessId,
Request,
get_blob, kernel_types as kt, println, vfs, Address, Capability, LazyLoadBlob, PackageId,
ProcessId, Request,
},
std::collections::{HashMap, HashSet},
};
@ -69,7 +69,7 @@ pub fn fetch_package_manifest(
pub fn fetch_package_metadata(
package_id: &crate::kinode::process::main::PackageId,
) -> anyhow::Result<OnchainMetadata> {
let resp = Request::to(("our", "chain", "app_store", "sys"))
let resp = Request::to(("our", "chain", "app-store", "sys"))
.body(serde_json::to_vec(&ChainRequests::GetApp(package_id.clone())).unwrap())
.send_and_await_response(5)??;
@ -78,7 +78,7 @@ pub fn fetch_package_metadata(
ChainResponses::GetApp(Some(app)) => app,
_ => {
return Err(anyhow::anyhow!(
"No app data found in response from chain:app_store:sys"
"No app data found in response from chain:app-store:sys"
))
}
};
@ -86,7 +86,7 @@ pub fn fetch_package_metadata(
Some(metadata) => metadata,
None => {
return Err(anyhow::anyhow!(
"No metadata found in response from chain:app_store:sys"
"No metadata found in response from chain:app-store:sys"
))
}
};
@ -101,7 +101,7 @@ pub fn new_package(
// set the version hash for this new local package
let version_hash = sha_256_hash(&bytes);
let resp = Request::to(("our", "downloads", "app_store", "sys"))
let resp = Request::to(("our", "downloads", "app-store", "sys"))
.body(serde_json::to_vec(&DownloadRequests::AddDownload(
AddDownloadRequest {
package_id: package_id.clone(),
@ -213,7 +213,7 @@ pub fn install(
) -> anyhow::Result<()> {
let process_package_id = package_id.clone().to_process_lib();
let file = vfs::open_file(
&format!("/app_store:sys/downloads/{process_package_id}/{version_hash}.zip"),
&format!("/app-store:sys/downloads/{process_package_id}/{version_hash}.zip"),
false,
Some(VFS_TIMEOUT),
)?;
@ -225,7 +225,6 @@ pub fn install(
verified: true, // sideloaded apps are implicitly verified because there is no "source" to verify against
caps_approved: true, // TODO see if we want to auto-approve local installs
manifest_hash: Some(manifest_hash),
pending_update_hash: None, // TODO: doublecheck if problematically overwrites auto_update state.
};
if let Ok(extracted) = extract_api(&process_package_id) {
@ -404,7 +403,7 @@ pub fn install(
/// given a `PackageId`, read its manifest, kill all processes declared in it,
/// then remove its drive in the virtual filesystem.
pub fn uninstall(state: &mut State, package_id: &PackageId) -> anyhow::Result<()> {
pub fn uninstall(our: &Address, state: &mut State, package_id: &PackageId) -> anyhow::Result<()> {
if !state.packages.contains_key(package_id) {
return Err(anyhow::anyhow!("package not found"));
}
@ -426,13 +425,30 @@ pub fn uninstall(state: &mut State, package_id: &PackageId) -> anyhow::Result<()
let manifest = serde_json::from_slice::<Vec<kt::PackageManifestEntry>>(&blob.bytes)?;
// reading from the package manifest, kill every process named
// *and* remove it from the homepage!
for entry in &manifest {
kernel_request(kt::KernelCommand::KillProcess(ProcessId::new(
let process_id = ProcessId::new(
Some(&entry.process_name),
package_id.package(),
package_id.publisher(),
)))
.send()?;
);
kernel_request(kt::KernelCommand::KillProcess(process_id.clone())).send()?;
// we have a unique capability that allows this, which we must attach
Request::to(("our", "homepage", "homepage", "sys"))
.body(
serde_json::json!({
"RemoveOther": process_id,
})
.to_string()
.as_bytes(),
)
.capabilities(vec![Capability::new(
Address::new(&our.node, ("homepage", "homepage", "sys")),
"\"RemoveOther\"".to_string(),
)])
.send()?;
}
// then, delete the drive

View File

@ -7,12 +7,12 @@ edition = "2021"
simulation-mode = []
[dependencies]
alloy-primitives = "0.7.6"
alloy-sol-types = "0.7.6"
alloy-primitives = "0.8.15"
alloy-sol-types = "0.8.15"
anyhow = "1.0"
bincode = "1.3.3"
kinode_process_lib = "0.9.4"
process_macros = { git = "https://github.com/kinode-dao/process_macros", rev = "626e501" }
kinode_process_lib = "0.10.0"
process_macros = "0.1"
rand = "0.8"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
@ -20,7 +20,7 @@ sha2 = "0.10.8"
sha3 = "0.10.8"
url = "2.4.1"
urlencoding = "2.1.0"
wit-bindgen = "0.24.0"
wit-bindgen = "0.36.0"
zip = { version = "1.1.1", default-features = false }
[lib]

View File

@ -0,0 +1,881 @@
#![feature(let_chains)]
//! chain:app-store:sys
//! This process manages the on-chain interactions for the App Store system in the Kinode ecosystem.
//! It is responsible for indexing and tracking app metadata stored on the blockchain.
//!
//! ## Responsibilities:
//!
//! 1. Index and track app metadata from the blockchain.
//! 2. Manage subscriptions to relevant blockchain events.
//! 3. Provide up-to-date information about available apps and their metadata.
//! 4. Handle auto-update settings for apps.
//!
//! ## Key Components:
//!
//! - `handle_eth_log`: Processes blockchain events related to app metadata updates.
//! - `fetch_and_subscribe_logs`: Initializes and maintains blockchain event subscriptions.
//!
//! ## Interaction Flow:
//!
//! 1. The process subscribes to relevant blockchain events on startup.
//! 2. When new events are received, they are processed to update the local state.
//! 3. Other processes (like main) can request information about apps.
//! 4. The chain process responds with the most up-to-date information from its local state.
//!
//! Note: This process does not handle app binaries or installation. It focuses solely on
//! metadata management and providing information about available apps.
//!
use crate::kinode::process::chain::{
ChainError, ChainRequests, OnchainApp, OnchainMetadata, OnchainProperties,
};
use crate::kinode::process::downloads::{AutoUpdateRequest, DownloadRequests};
use alloy_primitives::keccak256;
use alloy_sol_types::SolEvent;
use kinode::process::chain::ChainResponses;
use kinode_process_lib::{
await_message, call_init, eth, get_blob, http, kernel_types as kt, kimap, print_to_terminal,
println,
sqlite::{self, Sqlite},
timer, Address, Message, PackageId, Request, Response,
};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::str::FromStr;
wit_bindgen::generate!({
path: "target/wit",
generate_unused_types: true,
world: "app-store-sys-v1",
additional_derives: [serde::Deserialize, serde::Serialize, process_macros::SerdeJsonInto],
});
// Chain the indexer follows: the real kimap chain in production,
// a local anvil-style devnet (31337) under `simulation-mode`.
#[cfg(not(feature = "simulation-mode"))]
const CHAIN_ID: u64 = kimap::KIMAP_CHAIN_ID;
#[cfg(feature = "simulation-mode")]
const CHAIN_ID: u64 = 31337; // local

/// Timeout (seconds) applied to chain RPC calls made through the provider.
const CHAIN_TIMEOUT: u64 = 60; // 60s

// Kimap contract address. `'static` is implied for consts, so both variants
// use plain `&str` for consistency.
#[cfg(not(feature = "simulation-mode"))]
const KIMAP_ADDRESS: &str = kimap::KIMAP_ADDRESS; // optimism
#[cfg(feature = "simulation-mode")]
const KIMAP_ADDRESS: &str = "0x9CE8cCD2932DC727c70f9ae4f8C2b68E6Abed58C";

/// Delay (ms) between receiving an ETH subscription log and processing it,
/// and between retries of failed kimap RPC reads.
const DELAY_MS: u64 = 1_000; // 1s
/// In-memory state of the chain indexer process.
pub struct State {
    /// the kimap helper we are using
    pub kimap: kimap::Kimap,
    /// the last block at which we saved the state of the listings to disk.
    /// when we boot, we can read logs starting from this block and
    /// rebuild latest state.
    pub last_saved_block: u64,
    /// tables: listings: <package_id, listing>, published: vec<package_id>
    pub db: DB,
}
/// listing information derived from metadata hash in listing event
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct PackageListing {
    /// account address recorded for this listing's kimap entry
    /// (presumably the token-bound account — confirm)
    pub tba: eth::Address,
    /// where the package metadata can be fetched from (HTTP(S) URL)
    pub metadata_uri: String,
    /// expected hash of the content at `metadata_uri`, used to verify fetches
    pub metadata_hash: String,
    /// parsed metadata; `None` until fetched (e.g. during startup replay)
    pub metadata: Option<kt::Erc721Metadata>,
    /// when true, new versions trigger an auto-update request to downloads
    pub auto_update: bool,
    /// block number of the last on-chain event that touched this listing
    pub block: u64,
}
/// Every request body this process accepts: either an ETH subscription
/// result pushed by `eth:distro:sys`, or a `ChainRequests` message from a
/// local process. Untagged so both deserialize from their natural shape.
#[derive(Debug, Serialize, Deserialize, process_macros::SerdeJsonInto)]
#[serde(untagged)] // untagged as a meta-type for all incoming requests
pub enum Req {
    Eth(eth::EthSubResult),
    Request(ChainRequests),
}
/// Thin wrapper around the process's sqlite connection; all reads and writes
/// of listing/published state go through methods on this type.
pub struct DB {
    inner: Sqlite,
}
impl DB {
    /// Open (or create) the app store chain database and ensure all tables exist.
    pub fn connect(our: &Address) -> anyhow::Result<Self> {
        let inner = sqlite::open(our.package_id(), "app_store_chain.sqlite", Some(10))?;
        // create tables
        inner.write(CREATE_META_TABLE.into(), vec![], None)?;
        inner.write(CREATE_LISTINGS_TABLE.into(), vec![], None)?;
        inner.write(CREATE_PUBLISHED_TABLE.into(), vec![], None)?;
        Ok(Self { inner })
    }
    /// Delete the backing database file; failures are logged, not propagated.
    /// NOTE(review): `self.inner` still holds the old connection afterwards —
    /// the `ChainRequests::Reset` handler panics right after calling this to
    /// force a process restart, which appears to be the intended recovery path.
    pub fn reset(&self, our: &Address) {
        if let Err(e) = sqlite::remove_db(our.package_id(), "app_store_chain.sqlite", None) {
            println!("failed to reset app-store DB: {e}");
        }
    }
    /// Read the last checkpointed block number; returns 0 if absent or unparsable.
    pub fn get_last_saved_block(&self) -> anyhow::Result<u64> {
        let query = "SELECT value FROM meta WHERE key = 'last_saved_block'";
        let rows = self.inner.read(query.into(), vec![])?;
        if let Some(row) = rows.get(0) {
            if let Some(val_str) = row.get("value").and_then(|v| v.as_str()) {
                if let Ok(block) = val_str.parse::<u64>() {
                    return Ok(block);
                }
            }
        }
        Ok(0)
    }
    /// Upsert the last-processed block number into the meta table.
    pub fn set_last_saved_block(&self, block: u64) -> anyhow::Result<()> {
        let query = "INSERT INTO meta (key, value) VALUES ('last_saved_block', ?)
            ON CONFLICT(key) DO UPDATE SET value=excluded.value";
        let params = vec![block.to_string().into()];
        self.inner.write(query.into(), params, None)?;
        Ok(())
    }
    /// Insert a listing, or update every mutable column if the
    /// (package_name, publisher_node) pair already exists.
    pub fn insert_or_update_listing(
        &self,
        package_id: &PackageId,
        listing: &PackageListing,
    ) -> anyhow::Result<()> {
        // metadata is stored as a JSON string column; empty string == no metadata
        let metadata_json = if let Some(m) = &listing.metadata {
            serde_json::to_string(m)?
        } else {
            "".to_string()
        };
        let query = "INSERT INTO listings (package_name, publisher_node, tba, metadata_uri, metadata_hash, metadata_json, auto_update, block)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?)
            ON CONFLICT(package_name, publisher_node)
            DO UPDATE SET
                tba=excluded.tba,
                metadata_uri=excluded.metadata_uri,
                metadata_hash=excluded.metadata_hash,
                metadata_json=excluded.metadata_json,
                auto_update=excluded.auto_update,
                block=excluded.block";
        let params = vec![
            package_id.package_name.clone().into(),
            package_id.publisher_node.clone().into(),
            listing.tba.to_string().into(),
            listing.metadata_uri.clone().into(),
            listing.metadata_hash.clone().into(),
            metadata_json.into(),
            // booleans are stored as 0/1 integers
            (if listing.auto_update { 1 } else { 0 }).into(),
            listing.block.into(),
        ];
        self.inner.write(query.into(), params, None)?;
        Ok(())
    }
    /// Remove a listing row, if present.
    pub fn delete_listing(&self, package_id: &PackageId) -> anyhow::Result<()> {
        let query = "DELETE FROM listings WHERE package_name = ? AND publisher_node = ?";
        let params = vec![
            package_id.package_name.clone().into(),
            package_id.publisher_node.clone().into(),
        ];
        self.inner.write(query.into(), params, None)?;
        Ok(())
    }
    /// Fetch a single listing by package id, if present.
    pub fn get_listing(&self, package_id: &PackageId) -> anyhow::Result<Option<PackageListing>> {
        let query = "SELECT tba, metadata_uri, metadata_hash, metadata_json, auto_update, block FROM listings WHERE package_name = ? AND publisher_node = ?";
        let params = vec![
            package_id.package_name.clone().into(),
            package_id.publisher_node.clone().into(),
        ];
        let rows = self.inner.read(query.into(), params)?;
        if let Some(row) = rows.get(0) {
            Ok(Some(self.row_to_listing(row)?))
        } else {
            Ok(None)
        }
    }
    /// Fetch every listing in the table.
    pub fn get_all_listings(&self) -> anyhow::Result<Vec<(PackageId, PackageListing)>> {
        let query = "SELECT package_name, publisher_node, tba, metadata_uri, metadata_hash, metadata_json, auto_update, block FROM listings";
        let rows = self.inner.read(query.into(), vec![])?;
        let mut listings = Vec::new();
        for row in rows {
            let pid = PackageId {
                package_name: row["package_name"].as_str().unwrap_or("").to_string(),
                publisher_node: row["publisher_node"].as_str().unwrap_or("").to_string(),
            };
            let listing = self.row_to_listing(&row)?;
            listings.push((pid, listing));
        }
        Ok(listings)
    }
    /// Fetch a page of listings ordered by (package_name, publisher_node).
    pub fn get_listings_batch(
        &self,
        limit: u64,
        offset: u64,
    ) -> anyhow::Result<Vec<(PackageId, PackageListing)>> {
        // limit/offset are u64 values formatted directly into the SQL; they are
        // not attacker-controlled strings, so no injection risk — NOTE(review):
        // binding them as parameters would be more consistent with the other queries.
        let query = format!(
            "SELECT package_name, publisher_node, tba, metadata_uri, metadata_hash, metadata_json, auto_update, block
            FROM listings
            ORDER BY package_name, publisher_node
            LIMIT {} OFFSET {}",
            limit, offset
        );
        let rows = self.inner.read(query, vec![])?;
        let mut listings = Vec::new();
        for row in rows {
            let pid = PackageId {
                package_name: row["package_name"].as_str().unwrap_or("").to_string(),
                publisher_node: row["publisher_node"].as_str().unwrap_or("").to_string(),
            };
            let listing = self.row_to_listing(&row)?;
            listings.push((pid, listing));
        }
        Ok(listings)
    }
    /// Fetch all listings whose last-touched block is strictly greater than
    /// `block_number` (used to find listings changed since the last checkpoint).
    pub fn get_listings_since_block(
        &self,
        block_number: u64,
    ) -> anyhow::Result<Vec<(PackageId, PackageListing)>> {
        let query = "SELECT package_name, publisher_node, tba, metadata_uri, metadata_hash, metadata_json, auto_update, block
            FROM listings
            WHERE block > ?";
        let params = vec![block_number.into()];
        let rows = self.inner.read(query.into(), params)?;
        let mut listings = Vec::new();
        for row in rows {
            let pid = PackageId {
                package_name: row["package_name"].as_str().unwrap_or("").to_string(),
                publisher_node: row["publisher_node"].as_str().unwrap_or("").to_string(),
            };
            let listing = self.row_to_listing(&row)?;
            listings.push((pid, listing));
        }
        Ok(listings)
    }
    /// Convert one sqlite row (column name -> JSON value) into a `PackageListing`.
    /// Only `tba` is required; other columns fall back to empty/zero defaults.
    pub fn row_to_listing(
        &self,
        row: &HashMap<String, serde_json::Value>,
    ) -> anyhow::Result<PackageListing> {
        let tba_str = row["tba"]
            .as_str()
            .ok_or_else(|| anyhow::anyhow!("Invalid tba"))?;
        let tba = tba_str.parse::<eth::Address>()?;
        let metadata_uri = row["metadata_uri"].as_str().unwrap_or("").to_string();
        let metadata_hash = row["metadata_hash"].as_str().unwrap_or("").to_string();
        let metadata_json = row["metadata_json"].as_str().unwrap_or("");
        // empty string is the "no metadata" sentinel written by insert_or_update_listing
        let metadata: Option<kinode_process_lib::kernel_types::Erc721Metadata> =
            if metadata_json.is_empty() {
                None
            } else {
                serde_json::from_str(metadata_json)?
            };
        let auto_update = row["auto_update"].as_i64().unwrap_or(0) == 1;
        let block = row["block"].as_i64().unwrap_or(0) as u64;
        Ok(PackageListing {
            tba,
            metadata_uri,
            metadata_hash,
            metadata,
            auto_update,
            block,
        })
    }
    /// Check whether a package id is recorded as published by this node.
    pub fn get_published(&self, package_id: &PackageId) -> anyhow::Result<bool> {
        let query = "SELECT 1 FROM published WHERE package_name = ? AND publisher_node = ?";
        let params = vec![
            package_id.package_name.clone().into(),
            package_id.publisher_node.clone().into(),
        ];
        let rows = self.inner.read(query.into(), params)?;
        Ok(!rows.is_empty())
    }
    /// Record a package id as published; idempotent via ON CONFLICT DO NOTHING.
    pub fn insert_published(&self, package_id: &PackageId) -> anyhow::Result<()> {
        let query = "INSERT INTO published (package_name, publisher_node) VALUES (?, ?) ON CONFLICT DO NOTHING";
        let params = vec![
            package_id.package_name.clone().into(),
            package_id.publisher_node.clone().into(),
        ];
        self.inner.write(query.into(), params, None)?;
        Ok(())
    }
    /// Remove a package id from the published set.
    pub fn delete_published(&self, package_id: &PackageId) -> anyhow::Result<()> {
        let query = "DELETE FROM published WHERE package_name = ? AND publisher_node = ?";
        let params = vec![
            package_id.package_name.clone().into(),
            package_id.publisher_node.clone().into(),
        ];
        self.inner.write(query.into(), params, None)?;
        Ok(())
    }
    /// Fetch every package id in the published set.
    pub fn get_all_published(&self) -> anyhow::Result<Vec<PackageId>> {
        let query = "SELECT package_name, publisher_node FROM published";
        let rows = self.inner.read(query.into(), vec![])?;
        let mut result = Vec::new();
        for row in rows {
            let pid = PackageId {
                package_name: row["package_name"].as_str().unwrap_or("").to_string(),
                publisher_node: row["publisher_node"].as_str().unwrap_or("").to_string(),
            };
            result.push(pid);
        }
        Ok(result)
    }
}
// key/value bookkeeping for the indexer (currently only 'last_saved_block')
const CREATE_META_TABLE: &str = "
CREATE TABLE IF NOT EXISTS meta (
    key TEXT PRIMARY KEY,
    value TEXT
);";
// one row per on-chain package listing, keyed by (package_name, publisher_node)
const CREATE_LISTINGS_TABLE: &str = "
CREATE TABLE IF NOT EXISTS listings (
    package_name TEXT NOT NULL,
    publisher_node TEXT NOT NULL,
    tba TEXT NOT NULL,
    metadata_uri TEXT,
    metadata_hash TEXT,
    metadata_json TEXT,
    auto_update INTEGER NOT NULL DEFAULT 0,
    block INTEGER NOT NULL DEFAULT 0,
    PRIMARY KEY (package_name, publisher_node)
);";
// set of package ids published by this node
const CREATE_PUBLISHED_TABLE: &str = "
CREATE TABLE IF NOT EXISTS published (
    package_name TEXT NOT NULL,
    publisher_node TEXT NOT NULL,
    PRIMARY KEY (package_name, publisher_node)
);";
call_init!(init);

/// Process entry point: connect to the DB and chain, resume indexing from the
/// last checkpoint, then handle messages forever.
fn init(our: Address) {
    // Set up the chain provider, the sqlite-backed listings DB, and the
    // kimap helper used for all on-chain lookups.
    let provider = eth::Provider::new(CHAIN_ID, CHAIN_TIMEOUT);
    let db = DB::connect(&our).expect("failed to open DB");
    let kimap = kimap::Kimap::new(provider, eth::Address::from_str(KIMAP_ADDRESS).unwrap());

    // Resume from wherever we last persisted progress (0 on first boot).
    let last_saved_block = db.get_last_saved_block().unwrap_or(0);
    let mut state = State {
        kimap,
        last_saved_block,
        db,
    };

    fetch_and_subscribe_logs(&our, &mut state, last_saved_block);

    // Main loop: handle every incoming message; failures are logged, never fatal.
    loop {
        let message = match await_message() {
            Ok(message) => message,
            Err(send_error) => {
                print_to_terminal(1, &format!("chain: got network error: {send_error}"));
                continue;
            }
        };
        if let Err(e) = handle_message(&our, &mut state, &message) {
            print_to_terminal(1, &format!("chain: error handling message: {:?}", e));
        }
    }
}
/// Route one incoming message.
///
/// Responses are only acted on when they come from local `timer:distro:sys`:
/// each timer set in the `Req::Eth` arm carries a serialized ETH log in its
/// context, which is processed once the delay has elapsed. Responses from any
/// other source fall through and are silently ignored. Requests are either
/// raw ETH subscription results from `eth:distro:sys` (delayed via timer, or
/// re-subscribed on error) or `ChainRequests` from local processes.
fn handle_message(our: &Address, state: &mut State, message: &Message) -> anyhow::Result<()> {
    if !message.is_request() {
        // a timer fired: the delayed log rides along in the message context
        if message.is_local(&our) && message.source().process == "timer:distro:sys" {
            let Some(context) = message.context() else {
                return Err(anyhow::anyhow!("No context in timer message"));
            };
            let log = serde_json::from_slice(context)?;
            handle_eth_log(our, state, log, false)?;
            return Ok(());
        }
    } else {
        match serde_json::from_slice::<Req>(message.body())? {
            Req::Eth(eth_result) => {
                // only the local eth runtime module may push subscription events
                if !message.is_local(our) || message.source().process != "eth:distro:sys" {
                    return Err(anyhow::anyhow!(
                        "eth sub event from unexpected address: {}",
                        message.source()
                    ));
                }
                if let Ok(eth::EthSub { result, .. }) = eth_result {
                    if let Ok(eth::SubscriptionResult::Log(ref log)) =
                        serde_json::from_value::<eth::SubscriptionResult>(result)
                    {
                        // delay handling of ETH RPC subscriptions by DELAY_MS
                        // to allow kns to have a chance to process block
                        timer::set_timer(DELAY_MS, Some(serde_json::to_vec(log)?));
                    }
                } else {
                    // re-subscribe if error
                    state
                        .kimap
                        .provider
                        .subscribe_loop(1, app_store_filter(state), 1, 0);
                }
            }
            Req::Request(chains) => {
                handle_local_request(our, state, chains)?;
            }
        }
    }
    Ok(())
}
fn handle_local_request(
our: &Address,
state: &mut State,
req: ChainRequests,
) -> anyhow::Result<()> {
match req {
ChainRequests::GetApp(package_id) => {
let pid = package_id.clone().to_process_lib();
let listing = state.db.get_listing(&pid)?;
let onchain_app = listing.map(|app| app.to_onchain_app(&pid));
let response = ChainResponses::GetApp(onchain_app);
Response::new().body(&response).send()?;
}
ChainRequests::GetApps => {
let listings = state.db.get_all_listings()?;
let apps: Vec<OnchainApp> = listings
.into_iter()
.map(|(pid, listing)| listing.to_onchain_app(&pid))
.collect();
let response = ChainResponses::GetApps(apps);
Response::new().body(&response).send()?;
}
ChainRequests::GetOurApps => {
let published_list = state.db.get_all_published()?;
let mut apps = Vec::new();
for pid in published_list {
if let Some(listing) = state.db.get_listing(&pid)? {
apps.push(listing.to_onchain_app(&pid));
}
}
let response = ChainResponses::GetOurApps(apps);
Response::new().body(&response).send()?;
}
ChainRequests::StartAutoUpdate(package_id) => {
let pid = package_id.to_process_lib();
if let Some(mut listing) = state.db.get_listing(&pid)? {
listing.auto_update = true;
state.db.insert_or_update_listing(&pid, &listing)?;
let response = ChainResponses::AutoUpdateStarted;
Response::new().body(&response).send()?;
} else {
let error_response = ChainResponses::Err(ChainError::NoPackage);
Response::new().body(&error_response).send()?;
}
}
ChainRequests::StopAutoUpdate(package_id) => {
let pid = package_id.to_process_lib();
if let Some(mut listing) = state.db.get_listing(&pid)? {
listing.auto_update = false;
state.db.insert_or_update_listing(&pid, &listing)?;
let response = ChainResponses::AutoUpdateStopped;
Response::new().body(&response).send()?;
} else {
let error_response = ChainResponses::Err(ChainError::NoPackage);
Response::new().body(&error_response).send()?;
}
}
ChainRequests::Reset => {
state.db.reset(&our);
Response::new().body(&ChainResponses::ResetOk).send()?;
panic!("resetting state, restarting!");
}
}
Ok(())
}
/// Process one kimap note log for a ~metadata-uri posting.
///
/// `startup == true` means we are replaying historical logs at boot: on-chain
/// lookups and metadata fetches are skipped (tba/hash get placeholder values)
/// and `update_all_metadata` fills them in afterwards; the block checkpoint is
/// also not advanced. `startup == false` is the live path: resolve the
/// ~metadata-hash note on-chain (retrying once on RPC error), fetch and verify
/// metadata at the URI, upsert the listing, kick off auto-update if enabled,
/// and checkpoint the block.
fn handle_eth_log(
    our: &Address,
    state: &mut State,
    log: eth::Log,
    startup: bool,
) -> anyhow::Result<()> {
    let block_number: u64 = log
        .block_number
        .ok_or(anyhow::anyhow!("log missing block number"))?;
    let Ok(note) = kimap::decode_note_log(&log) else {
        // ignore invalid logs here -- they're not actionable
        return Ok(());
    };
    // parent_path is "<package>.<publisher...>"; both halves must be non-empty
    let package_id = note
        .parent_path
        .split_once('.')
        .ok_or(anyhow::anyhow!("invalid publisher name"))
        .and_then(|(package, publisher)| {
            if package.is_empty() || publisher.is_empty() {
                Err(anyhow::anyhow!("invalid publisher name"))
            } else {
                Ok(PackageId::new(package, publisher))
            }
        })?;
    // the app store exclusively looks for ~metadata-uri postings: if one is
    // observed, we then *query* for ~metadata-hash to verify the content
    // at the URI.
    let metadata_uri = String::from_utf8_lossy(&note.data).to_string();
    let is_our_package = package_id.publisher() == our.node();
    let (tba, metadata_hash) = if !startup {
        // generate ~metadata-hash full-path
        let hash_note = format!("~metadata-hash.{}", note.parent_path);
        // owner can change which we don't track (yet?) so don't save, need to get when desired
        let (tba, _owner, data) = match state.kimap.get(&hash_note) {
            Ok(gr) => Ok(gr),
            Err(e) => match e {
                eth::EthError::RpcError(_) => {
                    // retry on RpcError after DELAY_MS sleep
                    // sleep here rather than with, e.g., a message to
                    // `timer:distro:sys` so that events are processed in
                    // order of receipt
                    std::thread::sleep(std::time::Duration::from_millis(DELAY_MS));
                    state.kimap.get(&hash_note)
                }
                _ => Err(e),
            },
        }
        .map_err(|e| anyhow::anyhow!("Couldn't find {hash_note}: {e:?}"))?;
        match data {
            None => {
                // unpublish if metadata_uri empty
                if metadata_uri.is_empty() {
                    state.db.delete_published(&package_id)?;
                    state.db.delete_listing(&package_id)?;
                    state.last_saved_block = block_number;
                    state.db.set_last_saved_block(block_number)?;
                    return Ok(());
                }
                return Err(anyhow::anyhow!(
                    "metadata hash not found: {package_id}, {metadata_uri}"
                ));
            }
            Some(hash_note) => (tba, String::from_utf8_lossy(&hash_note).to_string()),
        }
    } else {
        // startup placeholders; real values are resolved by update_all_metadata
        (eth::Address::ZERO, String::new())
    };
    if is_our_package {
        state.db.insert_published(&package_id)?;
    }
    // if this is a startup event, we don't need to fetch metadata from the URI --
    // we'll loop over all listings after processing all logs and fetch them as needed.
    // fetch metadata from the URI (currently only handling HTTP(S) URLs!)
    // assert that the metadata hash matches the fetched data
    let metadata = if !startup {
        Some(fetch_metadata_from_url(&metadata_uri, &metadata_hash, 30)?)
    } else {
        None
    };
    let mut listing = state
        .db
        .get_listing(&package_id)?
        .unwrap_or(PackageListing {
            tba,
            metadata_uri: metadata_uri.clone(),
            metadata_hash: metadata_hash.clone(),
            metadata: metadata.clone(),
            auto_update: false,
            block: block_number,
        });
    // update fields
    // NOTE(review): on the startup path this overwrites an existing listing's
    // tba/hash/metadata with the placeholder values; update_all_metadata
    // appears to restore them afterwards — confirm.
    listing.tba = tba;
    listing.metadata_uri = metadata_uri;
    listing.metadata_hash = metadata_hash;
    listing.metadata = metadata.clone();
    state.db.insert_or_update_listing(&package_id, &listing)?;
    if !startup && listing.auto_update {
        println!("kicking off auto-update for: {}", package_id);
        Request::to(("our", "downloads", "app-store", "sys"))
            .body(&DownloadRequests::AutoUpdate(AutoUpdateRequest {
                package_id: crate::kinode::process::main::PackageId::from_process_lib(
                    package_id.clone(),
                ),
                // safe: metadata is Some on every !startup path
                metadata: metadata.unwrap().into(),
            }))
            .send()
            .unwrap();
    }
    if !startup {
        state.last_saved_block = block_number;
        state.db.set_last_saved_block(block_number)?;
    }
    Ok(())
}
/// after startup, fetch metadata for all listings
/// we do this as a separate step to not repeatedly fetch outdated metadata
/// as we process logs.
///
/// For each listing touched since `last_saved_block`: resolve its
/// ~metadata-hash note on-chain (one retry on transient RPC errors), remove
/// listings whose hash note has no data, re-fetch metadata from the stored
/// URI, upsert the listing, and kick off an auto-update request when the
/// listing has auto-update enabled and metadata was fetched successfully.
/// All failures are logged and skipped; nothing here aborts the loop.
fn update_all_metadata(state: &mut State, last_saved_block: u64) {
    // only listings touched by logs newer than the checkpoint need refreshing
    let updated_listings = match state.db.get_listings_since_block(last_saved_block) {
        Ok(listings) => listings,
        Err(e) => {
            print_to_terminal(
                1,
                &format!("error fetching updated listings since block {last_saved_block}: {e}"),
            );
            return;
        }
    };
    for (pid, mut listing) in updated_listings {
        let hash_note = format!("~metadata-hash.{}.{}", pid.package(), pid.publisher());
        let (tba, metadata_hash) = match state.kimap.get(&hash_note) {
            Ok((t, _o, data)) => {
                match data {
                    None => {
                        // hash note exists but carries no data: treat as unpublished
                        // If metadata_uri empty, unpublish
                        if listing.metadata_uri.is_empty() {
                            if let Err(e) = state.db.delete_published(&pid) {
                                print_to_terminal(1, &format!("error deleting published: {e}"));
                            }
                        }
                        if let Err(e) = state.db.delete_listing(&pid) {
                            print_to_terminal(1, &format!("error deleting listing: {e}"));
                        }
                        continue;
                    }
                    Some(hash_note) => (t, String::from_utf8_lossy(&hash_note).to_string()),
                }
            }
            Err(e) => {
                // If RpcError, retry once after delay
                if let eth::EthError::RpcError(_) = e {
                    std::thread::sleep(std::time::Duration::from_millis(DELAY_MS));
                    match state.kimap.get(&hash_note) {
                        Ok((t, _o, data)) => {
                            if let Some(hash_note) = data {
                                (t, String::from_utf8_lossy(&hash_note).to_string())
                            } else {
                                // no data again after retry
                                if listing.metadata_uri.is_empty() {
                                    if let Err(e) = state.db.delete_published(&pid) {
                                        print_to_terminal(
                                            1,
                                            &format!("error deleting published: {e}"),
                                        );
                                    }
                                }
                                if let Err(e) = state.db.delete_listing(&pid) {
                                    print_to_terminal(1, &format!("error deleting listing: {e}"));
                                }
                                continue;
                            }
                        }
                        Err(e2) => {
                            print_to_terminal(
                                1,
                                &format!("error retrieving metadata-hash after retry: {e2:?}"),
                            );
                            continue;
                        }
                    }
                } else {
                    // non-RPC errors are not retried
                    print_to_terminal(
                        1,
                        &format!("error retrieving metadata-hash: {e:?} for {pid}"),
                    );
                    continue;
                }
            }
        };
        // Update listing fields
        listing.tba = tba;
        listing.metadata_hash = metadata_hash;
        // fetch failure leaves metadata as None but still updates the listing row
        let metadata =
            match fetch_metadata_from_url(&listing.metadata_uri, &listing.metadata_hash, 30) {
                Ok(md) => Some(md),
                Err(err) => {
                    print_to_terminal(1, &format!("error fetching metadata for {}: {err}", pid));
                    None
                }
            };
        listing.metadata = metadata.clone();
        if let Err(e) = state.db.insert_or_update_listing(&pid, &listing) {
            print_to_terminal(1, &format!("error updating listing {}: {e}", pid));
        }
        if listing.auto_update {
            if let Some(md) = metadata {
                print_to_terminal(0, &format!("kicking off auto-update for: {}", pid));
                if let Err(e) = Request::to(("our", "downloads", "app-store", "sys"))
                    .body(&DownloadRequests::AutoUpdate(AutoUpdateRequest {
                        package_id: crate::kinode::process::main::PackageId::from_process_lib(
                            pid.clone(),
                        ),
                        metadata: md.into(),
                    }))
                    .send()
                {
                    print_to_terminal(1, &format!("error sending auto-update request: {e}"));
                }
            }
        }
    }
}
/// create the filter used for app store getLogs and subscription.
/// the app store exclusively looks for ~metadata-uri postings: if one is
/// observed, we then *query* for ~metadata-hash to verify the content
/// at the URI.
///
/// this means that ~metadata-hash should be *posted before or at the same time* as ~metadata-uri!
pub fn app_store_filter(state: &State) -> eth::Filter {
    // match Note events on the kimap contract whose label is ~metadata-uri
    eth::Filter::new()
        .address(*state.kimap.address())
        .events([kimap::contract::Note::SIGNATURE])
        .topic3(vec![keccak256("~metadata-uri")])
}
/// fetch app store event logs from chain since `last_saved_block`,
/// ingest them, refresh metadata, and subscribe to new events.
pub fn fetch_and_subscribe_logs(our: &Address, state: &mut State, last_saved_block: u64) {
    let filter = app_store_filter(state);
    // get past logs, subscribe to new ones.
    // subscribe first so we don't miss any logs
    state.kimap.provider.subscribe_loop(1, filter.clone(), 1, 0);
    // println!("fetching old logs from block {last_saved_block}");
    for log in fetch_logs(&state.kimap.provider, &filter.from_block(last_saved_block)) {
        // startup = true: metadata fetching is deferred to the
        // update_all_metadata pass below rather than done per-log
        if let Err(e) = handle_eth_log(our, state, log, true) {
            print_to_terminal(1, &format!("error ingesting log: {e}"));
        };
    }
    // now that all historical logs are ingested, refresh metadata once
    update_all_metadata(state, last_saved_block);
    // save updated last_saved_block
    if let Ok(block_number) = state.kimap.provider.get_block_number() {
        state.last_saved_block = block_number;
        state.db.set_last_saved_block(block_number).unwrap();
    }
    // println!("up to date to block {}", state.last_saved_block);
}
/// fetch logs from the chain with a given filter.
///
/// retries forever with a 5s sleep between attempts and only returns once
/// a request succeeds — callers rely on eventually getting a result.
fn fetch_logs(eth_provider: &eth::Provider, filter: &eth::Filter) -> Vec<eth::Log> {
    loop {
        match eth_provider.get_logs(filter) {
            Ok(res) => return res,
            Err(e) => {
                // surface the underlying error instead of discarding it, so
                // operators can tell an RPC outage from a bad filter
                println!("failed to fetch logs ({e:?})! trying again in 5s...");
                std::thread::sleep(std::time::Duration::from_secs(5));
            }
        }
    }
}
/// fetch metadata from url and verify it matches metadata_hash.
///
/// on success returns the parsed `Erc721Metadata`. failure paths are
/// distinguished (bad url, failed request, missing body, hash mismatch,
/// unparseable JSON) so callers log something actionable instead of the
/// former catch-all "metadata not found".
pub fn fetch_metadata_from_url(
    metadata_url: &str,
    metadata_hash: &str,
    timeout: u64,
) -> Result<kt::Erc721Metadata, anyhow::Error> {
    let url = url::Url::parse(metadata_url)
        .map_err(|e| anyhow::anyhow!("invalid metadata url {metadata_url}: {e}"))?;
    // the response body arrives as the lazy blob, not the return value
    http::client::send_request_await_response(http::Method::GET, url, None, timeout, vec![])
        .map_err(|e| anyhow::anyhow!("failed to fetch metadata: {e:?}"))?;
    let Some(body) = get_blob() else {
        return Err(anyhow::anyhow!("metadata response had no body"));
    };
    // verify content integrity against the onchain ~metadata-hash note
    let hash = keccak_256_hash(&body.bytes);
    if hash != metadata_hash {
        return Err(anyhow::anyhow!("metadata hash mismatch"));
    }
    serde_json::from_slice::<kt::Erc721Metadata>(&body.bytes)
        .map_err(|e| anyhow::anyhow!("metadata is not valid Erc721Metadata: {e}"))
}
/// generate a Keccak-256 hash string (with 0x prefix) of the metadata bytes
pub fn keccak_256_hash(bytes: &[u8]) -> String {
    use sha3::{Digest, Keccak256};
    // one-shot digest, hex-formatted with a 0x prefix to match onchain notes
    format!("0x{:x}", Keccak256::digest(bytes))
}
// quite annoyingly, we must convert from our gen'd version of PackageId
// to the process_lib's gen'd version. this is in order to access custom
// Impls that we want to use
impl crate::kinode::process::main::PackageId {
    /// convert this WIT-generated PackageId into the process_lib PackageId.
    pub fn to_process_lib(self) -> PackageId {
        let Self {
            package_name,
            publisher_node,
        } = self;
        PackageId {
            package_name,
            publisher_node,
        }
    }

    /// build a WIT-generated PackageId from a process_lib PackageId.
    pub fn from_process_lib(package_id: PackageId) -> Self {
        let PackageId {
            package_name,
            publisher_node,
        } = package_id;
        Self {
            package_name,
            publisher_node,
        }
    }
}
impl PackageListing {
    /// project this listing into the WIT-facing `OnchainApp` shape for the
    /// given package id, converting id and metadata to their generated types.
    pub fn to_onchain_app(&self, package_id: &PackageId) -> OnchainApp {
        let package_id =
            crate::kinode::process::main::PackageId::from_process_lib(package_id.clone());
        OnchainApp {
            package_id,
            tba: self.tba.to_string(),
            metadata_uri: self.metadata_uri.clone(),
            metadata_hash: self.metadata_hash.clone(),
            metadata: self.metadata.clone().map(Into::into),
            auto_update: self.auto_update,
        }
    }
}
impl From<kt::Erc721Metadata> for OnchainMetadata {
    /// map the process_lib ERC-721 metadata into the WIT-generated shape,
    /// field by field; `code_hashes` is collected into the target container.
    fn from(erc: kt::Erc721Metadata) -> Self {
        // move the nested properties out first so the remaining field moves
        // out of `erc` stay simple partial moves
        let props = erc.properties;
        OnchainMetadata {
            name: erc.name,
            description: erc.description,
            image: erc.image,
            external_url: erc.external_url,
            animation_url: erc.animation_url,
            properties: OnchainProperties {
                package_name: props.package_name,
                publisher: props.publisher,
                current_version: props.current_version,
                mirrors: props.mirrors,
                code_hashes: props.code_hashes.into_iter().collect(),
                license: props.license,
                screenshots: props.screenshots,
                wit_version: props.wit_version,
                dependencies: props.dependencies,
            },
        }
    }
}

View File

@ -1,5 +1,5 @@
[package]
name = "blog"
name = "download"
version = "0.1.0"
edition = "2021"
@ -8,12 +8,11 @@ simulation-mode = []
[dependencies]
anyhow = "1.0"
bincode = "1.3.3"
kinode_process_lib = "0.9.4"
kinode_process_lib = "0.10.0"
process_macros = "0.1"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
url = "2.5.0"
wit-bindgen = "0.24.0"
wit-bindgen = "0.36.0"
[lib]
crate-type = ["cdylib"]

View File

@ -1,8 +1,8 @@
//! download:app_store:sys
//! download:app-store:sys
//! terminal script for downloading apps from the app store.
//!
//! Usage:
//! download:app_store:sys <node_id> <package_id> <version_hash>
//! download:app-store:sys <node_id> <package_id> <version_hash>
//!
//! Arguments:
//! <node_id> The node ID to download from (e.g., my-friend.os)
@ -10,7 +10,7 @@
//! <version_hash> The version hash of the app to download
//!
//! Example:
//! download:app_store:sys my-friend.os app:publisher.os f5d374ab50e66888a7c2332b22d0f909f2e3115040725cfab98dcae488916990
//! download:app-store:sys my-friend.os app:publisher.os f5d374ab50e66888a7c2332b22d0f909f2e3115040725cfab98dcae488916990
//!
use crate::kinode::process::downloads::{DownloadRequests, LocalDownloadRequest};
use kinode_process_lib::{
@ -50,7 +50,7 @@ fn init(our: Address) {
let version_hash: String = arg3.to_string();
let Ok(_) = Request::to((our.node(), ("downloads", "app_store", "sys")))
let Ok(_) = Request::to((our.node(), ("downloads", "app-store", "sys")))
.body(DownloadRequests::LocalDownload(LocalDownloadRequest {
package_id: crate::kinode::process::main::PackageId {
package_name: package_id.package_name.clone(),
@ -61,7 +61,7 @@ fn init(our: Address) {
}))
.send()
else {
println!("download: failed to send request to downloads:app_store!");
println!("download: failed to send request to downloads:app-store!");
return;
};

View File

@ -8,8 +8,8 @@ simulation-mode = []
[dependencies]
anyhow = "1.0"
kinode_process_lib = "0.9.4"
process_macros = { git = "https://github.com/kinode-dao/process_macros", rev = "626e501" }
kinode_process_lib = "0.10.0"
process_macros = "0.1"
rand = "0.8"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
@ -17,7 +17,7 @@ sha2 = "0.10.8"
sha3 = "0.10.8"
url = "2.4.1"
urlencoding = "2.1.0"
wit-bindgen = "0.24.0"
wit-bindgen = "0.36.0"
zip = { version = "1.1.4", default-features = false, features = ["deflate"] }
[lib]

View File

@ -0,0 +1 @@
../../ft-worker/src/ft_worker_lib.rs

View File

@ -1,5 +1,5 @@
#![feature(let_chains)]
//! downloads:app_store:sys
//! downloads:app-store:sys
//! This process manages the downloading and sharing of app packages in the Kinode ecosystem.
//! It handles both local and remote download requests, as well as file management.
//!
@ -42,13 +42,18 @@
//! mechanism is implemented in the FT worker for improved modularity and performance.
//!
use crate::kinode::process::downloads::{
AutoDownloadCompleteRequest, AutoUpdateRequest, DirEntry, DownloadCompleteRequest,
DownloadError, DownloadRequests, DownloadResponses, Entry, FileEntry, HashMismatch,
LocalDownloadRequest, RemoteDownloadRequest, RemoveFileRequest,
AutoDownloadCompleteRequest, AutoDownloadError, AutoUpdateRequest, DirEntry,
DownloadCompleteRequest, DownloadError, DownloadRequests, DownloadResponses, Entry, FileEntry,
HashMismatch, LocalDownloadRequest, RemoteDownloadRequest, RemoveFileRequest,
};
use std::{
collections::{HashMap, HashSet},
io::Read,
str::FromStr,
};
use std::{collections::HashSet, io::Read, str::FromStr};
use ft_worker_lib::{spawn_receive_transfer, spawn_send_transfer};
use kinode::process::downloads::AutoDownloadSuccess;
use kinode_process_lib::{
await_message, call_init, get_blob, get_state,
http::client,
@ -69,7 +74,6 @@ wit_bindgen::generate!({
mod ft_worker_lib;
pub const VFS_TIMEOUT: u64 = 5; // 5s
pub const APP_SHARE_TIMEOUT: u64 = 120; // 120s
#[derive(Debug, Serialize, Deserialize, process_macros::SerdeJsonInto)]
#[serde(untagged)] // untagged as a meta-type for all incoming responses
@ -78,6 +82,15 @@ pub enum Resp {
HttpClient(Result<client::HttpClientResponse, client::HttpClientError>),
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct AutoUpdateStatus {
mirrors_left: HashSet<String>, // set(node/url)
mirrors_failed: Vec<(String, DownloadError)>, // vec(node/url, error)
active_mirror: String, // (node/url)
}
type AutoUpdates = HashMap<(PackageId, String), AutoUpdateStatus>;
#[derive(Debug, Serialize, Deserialize)]
pub struct State {
// persisted metadata about which packages we are mirroring
@ -103,27 +116,23 @@ impl State {
call_init!(init);
fn init(our: Address) {
println!("downloads: started");
// mirroring metadata is separate from vfs downloads state.
let mut state = State::load();
// /app_store:sys/downloads/
// /app-store:sys/downloads/
vfs::create_drive(our.package_id(), "downloads", None)
.expect("could not create /downloads drive");
let mut downloads =
vfs::open_dir("/app_store:sys/downloads", true, None).expect("could not open downloads");
vfs::open_dir("/app-store:sys/downloads", true, None).expect("could not open downloads");
let mut tmp =
vfs::open_dir("/app_store:sys/downloads/tmp", true, None).expect("could not open tmp");
vfs::open_dir("/app-store:sys/downloads/tmp", true, None).expect("could not open tmp");
let mut auto_updates: HashSet<(PackageId, String)> = HashSet::new();
// metadata for in-flight auto-updates
let mut auto_updates: AutoUpdates = HashMap::new();
loop {
match await_message() {
Err(send_error) => {
print_to_terminal(1, &format!("downloads: got network error: {send_error}"));
}
Ok(message) => {
if let Err(e) = handle_message(
&our,
@ -143,6 +152,33 @@ fn init(our: Address) {
.unwrap();
}
}
Err(send_error) => {
print_to_terminal(1, &format!("downloads: got network error: {send_error}"));
if let Some(context) = &send_error.context {
if let Ok(download_request) =
serde_json::from_slice::<LocalDownloadRequest>(&context)
{
let key = (
download_request.package_id.to_process_lib(),
download_request.desired_version_hash.clone(),
);
// Get the error first
let error = if send_error.kind.is_timeout() {
DownloadError::Timeout
} else if send_error.kind.is_offline() {
DownloadError::Offline
} else {
DownloadError::HandlingError(send_error.to_string())
};
// Then remove and get metadata
if let Some(metadata) = auto_updates.remove(&key) {
try_next_mirror(metadata, key, &mut auto_updates, error);
}
}
}
}
}
}
}
@ -157,7 +193,7 @@ fn handle_message(
message: &Message,
downloads: &mut Directory,
_tmp: &mut Directory,
auto_updates: &mut HashSet<(PackageId, String)>,
auto_updates: &mut AutoUpdates,
) -> anyhow::Result<()> {
if message.is_request() {
match message.body().try_into()? {
@ -174,8 +210,12 @@ fn handle_message(
} = download_request.clone();
if download_from.starts_with("http") {
// use http_client to GET it
Request::to(("our", "http_client", "distro", "sys"))
// use http-client to GET it
print_to_terminal(
1,
"kicking off http download for {package_id:?} and {version_hash:?}",
);
Request::to(("our", "http-client", "distro", "sys"))
.body(
serde_json::to_vec(&client::HttpClientAction::Http(
client::OutgoingHttpRequest {
@ -200,10 +240,9 @@ fn handle_message(
&package_id,
&desired_version_hash,
&download_from,
APP_SHARE_TIMEOUT,
)?;
Request::to((&download_from, "downloads", "app_store", "sys"))
Request::to((&download_from, "downloads", "app-store", "sys"))
.body(DownloadRequests::RemoteDownload(RemoteDownloadRequest {
package_id,
desired_version_hash,
@ -236,20 +275,15 @@ fn handle_message(
}
let target_worker = Address::from_str(&worker_address)?;
let _ = spawn_send_transfer(
our,
&package_id,
&desired_version_hash,
APP_SHARE_TIMEOUT,
&target_worker,
)?;
let _ =
spawn_send_transfer(our, &package_id, &desired_version_hash, &target_worker)?;
let resp = DownloadResponses::Success;
Response::new().body(&resp).send()?;
}
DownloadRequests::Progress(ref progress) => {
// forward progress to main:app_store:sys,
// forward progress to main:app-store:sys,
// pushed to UI via websockets
let _ = Request::to(("our", "main", "app_store", "sys"))
let _ = Request::to(("our", "main", "app-store", "sys"))
.body(progress)
.send();
}
@ -257,50 +291,30 @@ fn handle_message(
if !message.is_local(our) {
return Err(anyhow::anyhow!("got non local download complete"));
}
// if we have a pending auto_install, forward that context to the main process.
// it will check if the caps_hashes match (no change in capabilities), and auto_install if it does.
let manifest_hash = if auto_updates.remove(&(
req.package_id.clone().to_process_lib(),
req.version_hash.clone(),
)) {
match get_manifest_hash(
req.package_id.clone().to_process_lib(),
req.version_hash.clone(),
) {
Ok(manifest_hash) => Some(manifest_hash),
Err(e) => {
print_to_terminal(
1,
&format!("auto_update: error getting manifest hash: {:?}", e),
);
None
}
}
} else {
None
};
// pushed to UI via websockets
Request::to(("our", "main", "app_store", "sys"))
// forward to main:app-store:sys, pushed to UI via websockets
Request::to(("our", "main", "app-store", "sys"))
.body(serde_json::to_vec(&req)?)
.send()?;
// trigger auto-update install trigger to main:app_store:sys
if let Some(manifest_hash) = manifest_hash {
let auto_download_complete_req = AutoDownloadCompleteRequest {
download_info: req.clone(),
manifest_hash,
};
print_to_terminal(
1,
&format!(
"auto_update download complete: triggering install on main:app_store:sys"
),
);
Request::to(("our", "main", "app_store", "sys"))
.body(serde_json::to_vec(&auto_download_complete_req)?)
.send()?;
// Check if this is an auto-update download
let key = (
req.package_id.clone().to_process_lib(),
req.version_hash.clone(),
);
if let Some(metadata) = auto_updates.remove(&key) {
if let Some(err) = req.err {
try_next_mirror(metadata, key, auto_updates, err);
} else if let Err(_e) = handle_auto_update_success(key.0.clone(), key.1.clone())
{
try_next_mirror(
metadata,
key,
auto_updates,
DownloadError::InvalidManifest,
);
}
}
}
DownloadRequests::GetFiles(maybe_id) => {
@ -401,7 +415,7 @@ fn handle_message(
}
DownloadRequests::AutoUpdate(auto_update_request) => {
if !message.is_local(&our)
&& message.source().process != ProcessId::new(Some("chain"), "app_store", "sys")
&& message.source().process != ProcessId::new(Some("chain"), "app-store", "sys")
{
return Err(anyhow::anyhow!(
"got auto-update from non local chain source"
@ -414,29 +428,61 @@ fn handle_message(
} = auto_update_request.clone();
let process_lib_package_id = package_id.clone().to_process_lib();
// default auto_update to publisher. TODO: more config here.
let download_from = metadata.properties.publisher;
// default auto_update to publisher
// let download_from = metadata.properties.publisher.clone();
let current_version = metadata.properties.current_version;
let code_hashes = metadata.properties.code_hashes;
// Create a HashSet of mirrors including the publisher
let mut mirrors = HashSet::new();
let download_from = if let Some(first_mirror) = metadata.properties.mirrors.first()
{
first_mirror.clone()
} else {
"randomnode111.os".to_string()
};
println!("first_download_from: {download_from}");
mirrors.extend(metadata.properties.mirrors.into_iter());
mirrors.insert(metadata.properties.publisher.clone());
let version_hash = code_hashes
.iter()
.find(|(version, _)| version == &current_version)
.map(|(_, hash)| hash.clone())
.ok_or_else(|| anyhow::anyhow!("auto_update: error for package_id: {}, current_version: {}, no matching hash found", process_lib_package_id.to_string(), current_version))?;
.iter()
.find(|(version, _)| version == &current_version)
.map(|(_, hash)| hash.clone())
// note: if this errors, treat it as a full-on failure
// and bubble this up.
.ok_or_else(|| anyhow::anyhow!("auto_update: error for package_id: {}, current_version: {}, no matching hash found", process_lib_package_id.to_string(), current_version))?;
print_to_terminal(
1,
&format!(
"auto_update: kicking off download for {:?} from {} with version {} from mirror {}",
package_id, download_from, version_hash, download_from
),
);
let download_request = LocalDownloadRequest {
package_id,
download_from,
download_from: download_from.clone(),
desired_version_hash: version_hash.clone(),
};
// kick off local download to ourselves.
Request::to(("our", "downloads", "app_store", "sys"))
// Initialize auto-update status with mirrors
let key = (process_lib_package_id.clone(), version_hash.clone());
auto_updates.insert(
key,
AutoUpdateStatus {
mirrors_left: mirrors,
mirrors_failed: Vec::new(),
active_mirror: download_from.clone(),
},
);
// kick off local download to ourselves
Request::to(("our", "downloads", "app-store", "sys"))
.body(DownloadRequests::LocalDownload(download_request))
.send()?;
auto_updates.insert((process_lib_package_id, version_hash));
}
_ => {}
}
@ -445,18 +491,30 @@ fn handle_message(
Resp::Download(download_response) => {
// get context of the response.
// handled are errors or ok responses from a remote node.
// check state, do action based on that!
if let Some(context) = message.context() {
let download_request = serde_json::from_slice::<LocalDownloadRequest>(context)?;
match download_response {
DownloadResponses::Err(e) => {
Request::to(("our", "main", "app_store", "sys"))
.body(DownloadCompleteRequest {
package_id: download_request.package_id.clone(),
version_hash: download_request.desired_version_hash.clone(),
err: Some(e),
})
.send()?;
print_to_terminal(1, &format!("downloads: got error response: {e:?}"));
let key = (
download_request.package_id.clone().to_process_lib(),
download_request.desired_version_hash.clone(),
);
if let Some(metadata) = auto_updates.remove(&key) {
try_next_mirror(metadata, key, auto_updates, e);
} else {
// If not an auto-update, forward error normally
Request::to(("our", "main", "app-store", "sys"))
.body(DownloadCompleteRequest {
package_id: download_request.package_id,
version_hash: download_request.desired_version_hash,
err: Some(e),
})
.send()?;
}
}
DownloadResponses::Success => {
// todo: maybe we do something here.
@ -474,32 +532,88 @@ fn handle_message(
}
Resp::HttpClient(resp) => {
let Some(context) = message.context() else {
return Err(anyhow::anyhow!("http_client response without context"));
return Err(anyhow::anyhow!("http-client response without context"));
};
let download_request = serde_json::from_slice::<LocalDownloadRequest>(context)?;
if let Ok(client::HttpClientResponse::Http(client::HttpResponse {
status, ..
})) = resp
{
if status == 200 {
if let Err(e) = handle_receive_http_download(&download_request) {
print_to_terminal(
1,
&format!("error handling http_client response: {:?}", e),
);
Request::to(("our", "main", "app_store", "sys"))
.body(DownloadRequests::DownloadComplete(
DownloadCompleteRequest {
package_id: download_request.package_id.clone(),
version_hash: download_request.desired_version_hash.clone(),
err: Some(e),
},
))
.send()?;
let key = (
download_request.package_id.clone().to_process_lib(),
download_request.desired_version_hash.clone(),
);
// Check if this is an auto-update request
let is_auto_update = auto_updates.contains_key(&key);
let metadata = if is_auto_update {
auto_updates.remove(&key)
} else {
None
};
// Handle any non-200 response or client error
let Ok(client::HttpClientResponse::Http(resp)) = resp else {
if let Some(meta) = metadata {
let error = if let Err(e) = resp {
format!("HTTP client error: {e:?}")
} else {
"unexpected response type".to_string()
};
try_next_mirror(
meta,
key,
auto_updates,
DownloadError::HandlingError(error),
);
}
return Ok(());
};
if resp.status != 200 {
let error =
DownloadError::HandlingError(format!("HTTP status {}", resp.status));
handle_download_error(
is_auto_update,
metadata,
key,
auto_updates,
error,
&download_request,
)?;
return Ok(());
}
// Handle successful download
if let Err(e) = handle_receive_http_download(&download_request) {
print_to_terminal(1, &format!("error handling http-client response: {:?}", e));
handle_download_error(
is_auto_update,
metadata,
key,
auto_updates,
e,
&download_request,
)?;
} else if is_auto_update {
match handle_auto_update_success(key.0.clone(), key.1.clone()) {
Ok(_) => print_to_terminal(
1,
&format!(
"auto_update: successfully downloaded package {:?} version {}",
&download_request.package_id,
&download_request.desired_version_hash
),
),
Err(_) => {
if let Some(meta) = metadata {
try_next_mirror(
meta,
key,
auto_updates,
DownloadError::HandlingError(
"could not get manifest hash".to_string(),
),
);
}
}
}
} else {
println!("got http_client error: {resp:?}");
}
}
}
@ -507,6 +621,70 @@ fn handle_message(
Ok(())
}
/// Try the next available mirror for a download, recording the current mirror's failure
fn try_next_mirror(
mut metadata: AutoUpdateStatus,
key: (PackageId, String),
auto_updates: &mut AutoUpdates,
error: DownloadError,
) {
print_to_terminal(
1,
&format!(
"auto_update: got error from mirror {mirror:?} {error:?}, trying next mirror: {next_mirror:?}",
next_mirror = metadata.mirrors_left.iter().next().cloned(),
mirror = metadata.active_mirror,
error = error
),
);
// Record failure and remove from available mirrors
metadata
.mirrors_failed
.push((metadata.active_mirror.clone(), error));
metadata.mirrors_left.remove(&metadata.active_mirror);
let (package_id, version_hash) = key.clone();
match metadata.mirrors_left.iter().next().cloned() {
Some(next_mirror) => {
metadata.active_mirror = next_mirror.clone();
auto_updates.insert(key, metadata);
Request::to(("our", "downloads", "app-store", "sys"))
.body(
serde_json::to_vec(&DownloadRequests::LocalDownload(LocalDownloadRequest {
package_id: crate::kinode::process::main::PackageId::from_process_lib(
package_id,
),
download_from: next_mirror,
desired_version_hash: version_hash.clone(),
}))
.unwrap(),
)
.send()
.unwrap();
}
None => {
print_to_terminal(
1,
"auto_update: no more mirrors to try for package_id {package_id:?}",
);
// gather, and send error to main.
let node_tries = metadata.mirrors_failed;
let auto_download_error = AutoDownloadCompleteRequest::Err(AutoDownloadError {
package_id: crate::kinode::process::main::PackageId::from_process_lib(package_id),
version_hash,
tries: node_tries,
});
Request::to(("our", "main", "app-store", "sys"))
.body(auto_download_error)
.send()
.unwrap();
auto_updates.remove(&key);
}
}
}
fn handle_receive_http_download(
download_request: &LocalDownloadRequest,
) -> anyhow::Result<(), DownloadError> {
@ -524,7 +702,7 @@ fn handle_receive_http_download(
let bytes = get_blob().ok_or(DownloadError::BlobNotFound)?.bytes;
let package_dir = format!("{}/{}", "/app_store:sys/downloads", package_id.to_string());
let package_dir = format!("{}/{}", "/app-store:sys/downloads", package_id.to_string());
let _ = vfs::open_dir(&package_dir, true, None).map_err(|_| DownloadError::VfsError)?;
let calculated_hash = format!("{:x}", Sha256::digest(&bytes));
@ -546,7 +724,7 @@ fn handle_receive_http_download(
let manifest_path = format!("{}/{}.json", package_dir, version_hash);
extract_and_write_manifest(&bytes, &manifest_path).map_err(|_| DownloadError::VfsError)?;
Request::to(("our", "main", "app_store", "sys"))
Request::to(("our", "main", "app-store", "sys"))
.body(DownloadCompleteRequest {
package_id: download_request.package_id.clone(),
version_hash,
@ -558,6 +736,46 @@ fn handle_receive_http_download(
Ok(())
}
fn handle_download_error(
is_auto_update: bool,
metadata: Option<AutoUpdateStatus>,
key: (PackageId, String),
auto_updates: &mut AutoUpdates,
error: impl Into<DownloadError>,
download_request: &LocalDownloadRequest,
) -> anyhow::Result<()> {
let error = error.into();
if is_auto_update {
if let Some(meta) = metadata {
try_next_mirror(meta, key, auto_updates, error);
}
} else {
Request::to(("our", "main", "app-store", "sys"))
.body(DownloadCompleteRequest {
package_id: download_request.package_id.clone(),
version_hash: download_request.desired_version_hash.clone(),
err: Some(error),
})
.send()?;
}
Ok(())
}
/// Handle auto-update success case by getting manifest hash and sending completion message
fn handle_auto_update_success(package_id: PackageId, version_hash: String) -> anyhow::Result<()> {
let manifest_hash = get_manifest_hash(package_id.clone(), version_hash.clone())?;
Request::to(("our", "main", "app-store", "sys"))
.body(AutoDownloadCompleteRequest::Success(AutoDownloadSuccess {
package_id: crate::kinode::process::main::PackageId::from_process_lib(package_id),
version_hash,
manifest_hash,
}))
.send()
.unwrap();
Ok(())
}
fn format_entries(entries: Vec<vfs::DirEntry>, state: &State) -> Vec<Entry> {
entries
.into_iter()
@ -617,7 +835,7 @@ fn extract_and_write_manifest(file_contents: &[u8], manifest_path: &str) -> anyh
/// Used to check if we can share a package or not!
fn download_zip_exists(package_id: &PackageId, version_hash: &str) -> bool {
let filename = format!(
"/app_store:sys/downloads/{}:{}/{}.zip",
"/app-store:sys/downloads/{}:{}/{}.zip",
package_id.package_name,
package_id.publisher(),
version_hash
@ -630,7 +848,7 @@ fn download_zip_exists(package_id: &PackageId, version_hash: &str) -> bool {
}
fn get_manifest_hash(package_id: PackageId, version_hash: String) -> anyhow::Result<String> {
let package_dir = format!("{}/{}", "/app_store:sys/downloads", package_id.to_string());
let package_dir = format!("{}/{}", "/app-store:sys/downloads", package_id.to_string());
let manifest_path = format!("{}/{}.json", package_dir, version_hash);
let manifest_file = vfs::open_file(&manifest_path, false, None)?;

View File

@ -1,5 +1,5 @@
[package]
name = "ft_worker"
name = "ft-worker"
version = "0.2.0"
edition = "2021"
@ -9,13 +9,13 @@ simulation-mode = []
[dependencies]
anyhow = "1.0"
bincode = "1.3.3"
kinode_process_lib = "0.9.4"
process_macros = { git = "https://github.com/kinode-dao/process_macros", rev = "626e501" }
kinode_process_lib = "0.10.0"
process_macros = "0.1"
rand = "0.8"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
sha2 = "0.10.8"
wit-bindgen = "0.24.0"
wit-bindgen = "0.36.0"
zip = { version = "1.1.4", default-features = false, features = ["deflate"] }
[lib]

View File

@ -17,33 +17,29 @@ pub fn spawn_send_transfer(
our: &Address,
package_id: &PackageId,
version_hash: &str,
timeout: u64,
to_addr: &Address,
) -> anyhow::Result<()> {
let transfer_id: u64 = rand::random();
let timer_id = ProcessId::new(Some("timer"), "distro", "sys");
let Ok(worker_process_id) = spawn(
Some(&transfer_id.to_string()),
&format!("{}/pkg/ft_worker.wasm", our.package_id()),
&format!("{}/pkg/ft-worker.wasm", our.package_id()),
OnExit::None,
our_capabilities(),
vec![timer_id],
vec![(timer_id, "\"messaging\"".to_string())],
false,
) else {
return Err(anyhow::anyhow!("failed to spawn ft_worker!"));
return Err(anyhow::anyhow!("failed to spawn ft-worker!"));
};
let req = Request::new()
.target((&our.node, worker_process_id))
.expects_response(timeout + 1)
.body(
serde_json::to_vec(&DownloadRequests::RemoteDownload(RemoteDownloadRequest {
package_id: package_id.clone(),
desired_version_hash: version_hash.to_string(),
worker_address: to_addr.to_string(),
}))
.unwrap(),
);
let req = Request::new().target((&our.node, worker_process_id)).body(
serde_json::to_vec(&DownloadRequests::RemoteDownload(RemoteDownloadRequest {
package_id: package_id.clone(),
desired_version_hash: version_hash.to_string(),
worker_address: to_addr.to_string(),
}))
.unwrap(),
);
req.send()?;
Ok(())
}
@ -58,24 +54,22 @@ pub fn spawn_receive_transfer(
package_id: &PackageId,
version_hash: &str,
from_node: &str,
timeout: u64,
) -> anyhow::Result<Address> {
let transfer_id: u64 = rand::random();
let timer_id = ProcessId::new(Some("timer"), "distro", "sys");
let Ok(worker_process_id) = spawn(
Some(&transfer_id.to_string()),
&format!("{}/pkg/ft_worker.wasm", our.package_id()),
&format!("{}/pkg/ft-worker.wasm", our.package_id()),
OnExit::None,
our_capabilities(),
vec![timer_id],
vec![(timer_id, "\"messaging\"".to_string())],
false,
) else {
return Err(anyhow::anyhow!("failed to spawn ft_worker!"));
return Err(anyhow::anyhow!("failed to spawn ft-worker!"));
};
let req = Request::new()
.target((&our.node, worker_process_id.clone()))
.expects_response(timeout + 1)
.body(
serde_json::to_vec(&DownloadRequests::LocalDownload(LocalDownloadRequest {
package_id: package_id.clone(),

View File

@ -1,4 +1,4 @@
//! {ft_worker_id}:app_store:sys
//! {ft_worker_id}:app-store:sys
//! This process implements the file transfer functionality for the App Store system in the Kinode ecosystem.
//! It handles the chunked transfer of app package files between nodes, including download initiation,
//! progress tracking, and integrity verification.
@ -29,6 +29,7 @@
//!
//! - Hash mismatches between the received file and the expected hash are detected and reported.
//! - Various I/O errors are caught and propagated.
//! - A 120 second killswitch is implemented to clean up dangling transfers.
//!
//! ## Integration with App Store:
//!
@ -61,6 +62,7 @@ wit_bindgen::generate!({
});
const CHUNK_SIZE: u64 = 262144; // 256KB
const KILL_SWITCH_MS: u64 = 120000; // 2 minutes
call_init!(init);
fn init(our: Address) {
@ -78,8 +80,7 @@ fn init(our: Address) {
}
// killswitch timer, 2 minutes. sender or receiver gets killed/cleaned up.
// TODO: killswitch update bubbles up to downloads process?
timer::set_timer(120000, None);
timer::set_timer(KILL_SWITCH_MS, None);
let start = std::time::Instant::now();
@ -105,7 +106,23 @@ fn init(our: Address) {
start.elapsed().as_millis()
),
),
Err(e) => print_to_terminal(1, &format!("ft_worker: receive error: {}", e)),
Err(e) => {
print_to_terminal(1, &format!("ft_worker: receive error: {}", e));
// bubble up to parent.
// TODO: doublecheck this.
// if this fires on a basic timeout, that's bad.
Request::new()
.body(DownloadRequests::DownloadComplete(
DownloadCompleteRequest {
package_id: package_id.clone().into(),
version_hash: desired_version_hash.to_string(),
err: Some(DownloadError::HandlingError(e.to_string())),
},
))
.target(parent_process)
.send()
.unwrap();
}
}
}
DownloadRequests::RemoteDownload(remote_request) => {
@ -139,7 +156,7 @@ fn handle_sender(worker: &str, package_id: &PackageId, version_hash: &str) -> an
let target_worker = Address::from_str(worker)?;
let filename = format!(
"/app_store:sys/downloads/{}:{}/{}.zip",
"/app-store:sys/downloads/{}:{}/{}.zip",
package_id.package_name, package_id.publisher_node, version_hash
);
@ -176,7 +193,7 @@ fn handle_receiver(
let package_dir = vfs::open_dir(
&format!(
"/app_store:sys/downloads/{}:{}/",
"/app-store:sys/downloads/{}:{}/",
package_id.package_name,
package_id.publisher(),
),
@ -187,6 +204,17 @@ fn handle_receiver(
loop {
let message = await_message()?;
if *message.source() == timer_address {
// send error message to downloads process
Request::new()
.body(DownloadRequests::DownloadComplete(
DownloadCompleteRequest {
package_id: package_id.clone().into(),
version_hash: version_hash.to_string(),
err: Some(DownloadError::Timeout),
},
))
.target(parent_process.clone())
.send()?;
return Ok(());
}
if !message.is_request() {

View File

@ -0,0 +1,21 @@
# Cargo manifest for the `install` process: a terminal script that asks the
# local app-store to install an already-downloaded package.
[package]
name = "install"
version = "0.1.0"
edition = "2021"
# Compile-time flag; empty feature (no dependency toggles), presumably gates
# cfg(feature = "simulation-mode") test-network paths in the source — TODO confirm.
[features]
simulation-mode = []
[dependencies]
anyhow = "1.0"
kinode_process_lib = "0.10.0"
process_macros = "0.1"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
wit-bindgen = "0.36.0"
# Built as a cdylib so the crate can be packaged as a Wasm component.
[lib]
crate-type = ["cdylib"]
# WIT package binding consumed by the Kinode component tooling.
[package.metadata.component]
package = "kinode:process"

View File

@ -1,8 +1,8 @@
//! install:app_store:sys
//! install:app-store:sys
//! terminal script for installing apps from the app store.
//!
//! Usage:
//! install:app_store:sys <package_id> <version_hash>
//! install:app-store:sys <package_id> <version_hash>
//!
//! Arguments:
//! <package_id> The package ID of the app (e.g., app:publisher.os)
@ -49,7 +49,7 @@ fn init(our: Address) {
let version_hash = args[1].to_string();
let Ok(Ok(Message::Response { body, .. })) =
Request::to((our.node(), ("main", "app_store", "sys")))
Request::to((our.node(), ("main", "app-store", "sys")))
.body(LocalRequest::Install(InstallPackageRequest {
package_id: crate::kinode::process::main::PackageId {
package_name: package_id.package_name.clone(),
@ -60,12 +60,12 @@ fn init(our: Address) {
}))
.send_and_await_response(5)
else {
println!("install: failed to get a response from app_store..!");
println!("install: failed to get a response from app-store..!");
return;
};
let Ok(response) = body.try_into() else {
println!("install: failed to parse response from app_store..!");
println!("install: failed to parse response from app-store..!");
return;
};
@ -78,7 +78,7 @@ fn init(our: Address) {
println!("make sure that the package has been downloaded!")
}
_ => {
println!("install: unexpected response from app_store..!");
println!("install: unexpected response from app-store..!");
return;
}
}

View File

@ -1,16 +1,16 @@
{
"name": "App Store",
"description": "A package manager + app store.",
"description": "A package manager + app store for Kinode OS.",
"image": "",
"properties": {
"package_name": "app_store",
"package_name": "app-store",
"publisher": "sys",
"current_version": "0.3.1",
"current_version": "1.0.0",
"mirrors": [],
"code_hashes": {
"0.3.1": ""
"1.0.0": ""
},
"wit_version": 0,
"wit_version": 1,
"dependencies": []
},
"external_url": "https://kinode.org",

View File

@ -5,10 +5,10 @@
"on_exit": "Restart",
"request_networking": true,
"request_capabilities": [
"http_client:distro:sys",
"http_server:distro:sys",
"main:app_store:sys",
"chain:app_store:sys",
"http-client:distro:sys",
"http-server:distro:sys",
"main:app-store:sys",
"chain:app-store:sys",
"terminal:terminal:sys",
"vfs:distro:sys",
{
@ -19,10 +19,10 @@
}
],
"grant_capabilities": [
"http_server:distro:sys",
"http-server:distro:sys",
"vfs:distro:sys",
"terminal:terminal:sys",
"http_client:distro:sys"
"http-client:distro:sys"
],
"public": false
},
@ -32,13 +32,14 @@
"on_exit": "Restart",
"request_networking": true,
"request_capabilities": [
"main:app_store:sys",
"downloads:app_store:sys",
"main:app-store:sys",
"downloads:app-store:sys",
"vfs:distro:sys",
"kns_indexer:kns_indexer:sys",
"kns-indexer:kns-indexer:sys",
"eth:distro:sys",
"http_server:distro:sys",
"http_client:distro:sys",
"http-server:distro:sys",
"http-client:distro:sys",
"sqlite:distro:sys",
{
"process": "vfs:distro:sys",
"params": {
@ -47,29 +48,34 @@
}
],
"grant_capabilities": [
"http_server:distro:sys",
"kns_indexer:kns_indexer:sys",
"http-server:distro:sys",
"kns-indexer:kns-indexer:sys",
"vfs:distro:sys",
"http_client:distro:sys",
"http-client:distro:sys",
"eth:distro:sys",
"sqlite:distro:sys",
"timer:distro:sys"
],
"public": false
},
{
"process_name": "main",
"process_wasm_path": "/app_store.wasm",
"process_wasm_path": "/app-store.wasm",
"on_exit": "Restart",
"request_networking": true,
"request_capabilities": [
"terminal:terminal:sys",
"filesystem:distro:sys",
"homepage:homepage:sys",
"http_server:distro:sys",
"http_client:distro:sys",
"http-server:distro:sys",
"http-client:distro:sys",
{
"process": "homepage:homepage:sys",
"params": "RemoveOther"
},
"net:distro:sys",
"downloads:app_store:sys",
"chain:app_store:sys",
"downloads:app-store:sys",
"chain:app-store:sys",
"vfs:distro:sys",
"kernel:distro:sys",
"eth:distro:sys",
@ -82,7 +88,7 @@
"sqlite:distro:sys",
"kv:distro:sys",
"chess:chess:sys",
"kns_indexer:kns_indexer:sys",
"kns-indexer:kns-indexer:sys",
{
"process": "vfs:distro:sys",
"params": {
@ -93,12 +99,12 @@
"grant_capabilities": [
"eth:distro:sys",
"net:distro:sys",
"http_client:distro:sys",
"http_server:distro:sys",
"kns_indexer:kns_indexer:sys",
"http-client:distro:sys",
"http-server:distro:sys",
"kns-indexer:kns-indexer:sys",
"terminal:terminal:sys",
"vfs:distro:sys"
],
"public": false
}
]
]

View File

@ -4,37 +4,37 @@
"public": false,
"request_networking": false,
"request_capabilities": [
"main:app_store:sys",
"downloads:app_store:sys"
"main:app-store:sys",
"downloads:app-store:sys"
],
"grant_capabilities": [
"main:app_store:sys",
"downloads:app_store:sys"
"main:app-store:sys",
"downloads:app-store:sys"
],
"wit_version": 0
"wit_version": 1
},
"install.wasm": {
"root": false,
"public": false,
"request_networking": false,
"request_capabilities": [
"main:app_store:sys"
"main:app-store:sys"
],
"grant_capabilities": [
"main:app_store:sys"
"main:app-store:sys"
],
"wit_version": 0
"wit_version": 1
},
"uninstall.wasm": {
"root": false,
"public": false,
"request_networking": false,
"request_capabilities": [
"main:app_store:sys"
"main:app-store:sys"
],
"grant_capabilities": [
"main:app_store:sys"
"main:app-store:sys"
],
"wit_version": 0
"wit_version": 1
}
}

View File

@ -1,11 +1,16 @@
import React from 'react';
import { Link } from 'react-router-dom';
import { Link, useLocation } from 'react-router-dom';
import { STORE_PATH, PUBLISH_PATH, MY_APPS_PATH } from '../constants/path';
import { ConnectButton } from '@rainbow-me/rainbowkit';
import { FaHome } from "react-icons/fa";
import NotificationBay from './NotificationBay';
import useAppsStore from '../store';
const Header: React.FC = () => {
const location = useLocation();
const { updates } = useAppsStore();
const updateCount = Object.keys(updates || {}).length;
return (
<header className="app-header">
<div className="header-left">
@ -15,7 +20,10 @@ const Header: React.FC = () => {
</button>
<Link to={STORE_PATH} className={location.pathname === STORE_PATH ? 'active' : ''}>Apps</Link>
<Link to={PUBLISH_PATH} className={location.pathname === PUBLISH_PATH ? 'active' : ''}>Publish</Link>
<Link to={MY_APPS_PATH} className={location.pathname === MY_APPS_PATH ? 'active' : ''}>My Apps</Link>
<Link to={MY_APPS_PATH} className={location.pathname === MY_APPS_PATH ? 'active' : ''}>
My Apps
{updateCount > 0 && <span className="update-badge">{updateCount}</span>}
</Link>
</nav>
</div>
<div className="header-right">
@ -25,4 +33,5 @@ const Header: React.FC = () => {
</header>
);
};
export default Header;

View File

@ -8,12 +8,12 @@ interface ManifestDisplayProps {
const capabilityMap: Record<string, string> = {
'vfs:distro:sys': 'Virtual Filesystem',
'http_client:distro:sys': 'HTTP Client',
'http_server:distro:sys': 'HTTP Server',
'http-client:distro:sys': 'HTTP Client',
'http-server:distro:sys': 'HTTP Server',
'eth:distro:sys': 'Ethereum RPC access',
'homepage:homepage:sys': 'Ability to add itself to homepage',
'main:app_store:sys': 'App Store',
'chain:app_store:sys': 'Chain',
'main:app-store:sys': 'App Store',
'chain:app-store:sys': 'Chain',
'terminal:terminal:sys': 'Terminal',
};

View File

@ -0,0 +1,69 @@
import React, { useState } from 'react';
import { FaExclamationTriangle } from 'react-icons/fa';
import useAppsStore from '../store';

/**
 * Danger-zone control that re-indexes all apps and resets app-store state
 * via the store's `resetStore` action, behind a confirmation modal.
 */
const ResetButton: React.FC = () => {
  const resetStore = useAppsStore(state => state.resetStore);
  // isOpen: confirmation-modal visibility; isLoading: reset request in flight.
  const [isOpen, setIsOpen] = useState(false);
  const [isLoading, setIsLoading] = useState(false);

  // Run the reset; close the modal only on success. Failures are surfaced via
  // console + alert, and the modal stays open so the user can retry.
  const handleReset = async () => {
    try {
      setIsLoading(true);
      await resetStore();
      setIsOpen(false);
    } catch (error) {
      console.error('Reset failed:', error);
      alert('Failed to reset the app store. Please try again.');
    } finally {
      setIsLoading(false);
    }
  };

  return (
    <>
      <button
        onClick={() => setIsOpen(true)}
        className="button danger"
        style={{ fontSize: '0.9rem' }}
      >
        Reset Store
      </button>
      {isOpen && (
        // Clicking the overlay dismisses; stopPropagation keeps clicks inside
        // the modal content from bubbling up and closing it.
        <div className="modal-overlay" onClick={() => setIsOpen(false)}>
          <div className="modal-content" onClick={e => e.stopPropagation()}>
            <button className="modal-close" onClick={() => setIsOpen(false)}>×</button>
            <div style={{ display: 'flex', alignItems: 'center', gap: '0.75rem', marginBottom: '1rem' }}>
              <FaExclamationTriangle size={24} style={{ color: 'var(--red)' }} />
              <h3 style={{ margin: 0 }}>Warning</h3>
            </div>
            <p style={{ marginBottom: '1.5rem' }}>
              This action will re-index all apps and reset the store state.
              Only proceed if you know what you're doing.
            </p>
            <div style={{ display: 'flex', justifyContent: 'flex-end', gap: '0.75rem' }}>
              <button
                onClick={() => setIsOpen(false)}
                className="button"
              >
                Cancel
              </button>
              <button
                onClick={handleReset}
                disabled={isLoading}
                className="button danger"
              >
                {isLoading ? 'Resetting...' : 'Reset Store'}
              </button>
            </div>
          </div>
        </div>
      )}
    </>
  );
};

export default ResetButton;

View File

@ -0,0 +1,16 @@
import React from 'react';
interface TooltipProps {
content: React.ReactNode;
children?: React.ReactNode;
}
export function Tooltip({ content, children }: TooltipProps) {
return (
<div className="tooltip-container">
{children}
<span className="tooltip-icon"></span>
<div className="tooltip-content">{content}</div>
</div>
);
}

View File

@ -2,4 +2,5 @@ export { default as Header } from './Header';
export { default as MirrorSelector } from './MirrorSelector';
export { default as PackageSelector } from './PackageSelector';
export { default as ManifestDisplay } from './ManifestDisplay';
export { default as NotificationBay } from './NotificationBay';
export { default as NotificationBay } from './NotificationBay';
export { default as ResetButton } from './ResetButton';

File diff suppressed because it is too large Load Diff

View File

@ -148,6 +148,12 @@ export default function AppPage() {
{latestVersion && (
<li><span>Latest Version:</span> <span>{latestVersion}</span></li>
)}
{installedApp?.pending_update_hash && (
<li className="warning">
<span>Failed Auto-Update:</span>
<span>Update to version with hash {installedApp.pending_update_hash.slice(0, 8)}... failed, approve newly requested capabilities and install it here:</span>
</li>
)}
<li><span>Publisher:</span> <span>{app.package_id.publisher_node}</span></li>
<li><span>License:</span> <span>{app.metadata?.properties?.license || "Not specified"}</span></li>
<li>

View File

@ -0,0 +1,484 @@
import React, { useState, useEffect } from "react";
import { FaFolder, FaFile, FaChevronLeft, FaSync, FaRocket, FaSpinner, FaCheck, FaTrash, FaExclamationTriangle, FaTimesCircle, FaChevronDown, FaChevronRight } from "react-icons/fa";
import { useNavigate } from "react-router-dom";
import useAppsStore from "../store";
import { ResetButton } from "../components";
import { DownloadItem, PackageManifestEntry, PackageState, Updates, DownloadError, UpdateInfo } from "../types/Apps";

// Core packages that cannot be uninstalled
const CORE_PACKAGES = [
  "app-store:sys",
  "chess:sys",
  "contacts:sys",
  "homepage:sys",
  "kns-indexer:sys",
  "settings:sys",
  "terminal:sys",
];

/**
 * "My Apps" page: shows installed packages (with uninstall, except core
 * packages), a browsable downloads tree, and failed auto-updates, wiring
 * install / uninstall / mirroring / cleanup actions through the app store.
 */
export default function MyAppsPage() {
  const navigate = useNavigate();
  const {
    fetchDownloads,
    fetchDownloadsForApp,
    startMirroring,
    stopMirroring,
    installApp,
    removeDownload,
    fetchInstalled,
    installed,
    uninstallApp,
    fetchUpdates,
    clearUpdates,
    updates
  } = useAppsStore();

  // currentPath: segments into the downloads tree ([] = root listing).
  const [currentPath, setCurrentPath] = useState<string[]>([]);
  const [items, setItems] = useState<DownloadItem[]>([]);
  // Package IDs whose failed-update details are expanded in the UI.
  const [expandedUpdates, setExpandedUpdates] = useState<Set<string>>(new Set());
  const [isInstalling, setIsInstalling] = useState(false);
  const [isUninstalling, setIsUninstalling] = useState(false);
  const [error, setError] = useState<string | null>(null);
  // Capability-approval modal state: shown before confirming an install.
  const [showCapApproval, setShowCapApproval] = useState(false);
  const [manifest, setManifest] = useState<PackageManifestEntry | null>(null);
  const [selectedItem, setSelectedItem] = useState<DownloadItem | null>(null);
  const [showUninstallConfirm, setShowUninstallConfirm] = useState(false);
  const [appToUninstall, setAppToUninstall] = useState<any>(null);

  // Refresh listing + installed apps + update info whenever the user
  // navigates within the downloads tree.
  useEffect(() => {
    loadItems();
    fetchInstalled();
    fetchUpdates();
  }, [currentPath]);

  // Fetch download items for the current path (root lists all apps;
  // otherwise the path segments joined with ':' name one app).
  const loadItems = async () => {
    try {
      let downloads: DownloadItem[];
      if (currentPath.length === 0) {
        downloads = await fetchDownloads();
      } else {
        downloads = await fetchDownloadsForApp(currentPath.join(':'));
      }
      setItems(downloads);
    } catch (error) {
      console.error("Error loading items:", error);
      setError(`Error loading items: ${error instanceof Error ? error.message : String(error)}`);
    }
  };

  const handleClearUpdates = async (packageId: string) => {
    await clearUpdates(packageId);
    fetchUpdates(); // Refresh updates after clearing
  };

  // Toggle expanded/collapsed state for one package's failed-update details.
  const toggleUpdateExpansion = (packageId: string) => {
    setExpandedUpdates(prev => {
      const newSet = new Set(prev);
      if (newSet.has(packageId)) {
        newSet.delete(packageId);
      } else {
        newSet.add(packageId);
      }
      return newSet;
    });
  };

  // Flatten a DownloadError variant into a short display string.
  const formatError = (error: DownloadError): string => {
    if (typeof error === 'string') {
      return error;
    } else if ('HashMismatch' in error) {
      return `Hash mismatch (expected ${error.HashMismatch.desired.slice(0, 8)}, got ${error.HashMismatch.actual.slice(0, 8)})`;
    } else if ('HandlingError' in error) {
      return error.HandlingError;
    } else if ('Timeout' in error) {
      return 'Connection timed out';
    }
    return 'Unknown error';
  };

  // Render the "Failed Auto Updates" section: per-package error counts,
  // pending-manifest badges, and retry/clear actions.
  const renderUpdates = () => {
    if (!updates || Object.keys(updates).length === 0) {
      return (
        <div className="updates-section">
          <h2>Failed Auto Updates (0)</h2>
          <p>None found, all clear!</p>
        </div>
      );
    }
    return (
      <div className="updates-section">
        <h2 className="section-title">Failed Auto Updates ({Object.keys(updates).length})</h2>
        {/* NOTE(review): the empty branch below is unreachable — the empty
            case already returned above. */}
        {Object.keys(updates).length > 0 ? (
          <div className="updates-list">
            {Object.entries(updates).map(([packageId, versionMap]) => {
              const totalErrors = Object.values(versionMap).reduce((sum, info) =>
                sum + (info.errors?.length || 0), 0);
              const hasManifestChanges = Object.values(versionMap).some(info =>
                info.pending_manifest_hash);
              return (
                <div key={packageId} className="update-item error">
                  <div className="update-header" onClick={() => toggleUpdateExpansion(packageId)}>
                    <div className="update-title">
                      {expandedUpdates.has(packageId) ? <FaChevronDown /> : <FaChevronRight />}
                      <FaExclamationTriangle className="error-badge" />
                      <span>{packageId}</span>
                      <div className="update-summary">
                        {totalErrors > 0 && (
                          <span className="error-count">{totalErrors} error{totalErrors !== 1 ? 's' : ''}</span>
                        )}
                        {hasManifestChanges && (
                          <span className="manifest-badge">Manifest changes pending</span>
                        )}
                      </div>
                    </div>
                    <div className="update-actions">
                      <button
                        className="action-button retry"
                        onClick={(e) => {
                          e.stopPropagation();
                          navigate(`/download/${packageId}`);
                        }}
                        title="Retry download"
                      >
                        <FaSync />
                        <span>Retry</span>
                      </button>
                      <button
                        className="action-button clear"
                        onClick={(e) => {
                          e.stopPropagation();
                          handleClearUpdates(packageId);
                        }}
                        title="Clear update info"
                      >
                        <FaTimesCircle />
                      </button>
                    </div>
                  </div>
                  {expandedUpdates.has(packageId) && Object.entries(versionMap).map(([versionHash, info]) => (
                    <div key={versionHash} className="update-details">
                      <div className="version-info">
                        Version: {versionHash.slice(0, 8)}...
                      </div>
                      {info.pending_manifest_hash && (
                        <div className="manifest-info">
                          <FaExclamationTriangle />
                          Pending manifest: {info.pending_manifest_hash.slice(0, 8)}...
                        </div>
                      )}
                      {info.errors && info.errors.length > 0 && (
                        <div className="error-list">
                          {info.errors.map(([source, error], idx) => (
                            <div key={idx} className="error-item">
                              <FaExclamationTriangle className="error-icon" />
                              <span>{source}: {formatError(error)}</span>
                            </div>
                          ))}
                        </div>
                      )}
                    </div>
                  ))}
                </div>
              );
            })}
          </div>
        ) : (
          <div className="empty-state">
            No failed auto updates found.
          </div>
        )}
      </div>
    );
  };

  // Descend into a directory item; files are not navigable.
  const navigateToItem = (item: DownloadItem) => {
    if (item.Dir) {
      setCurrentPath([...currentPath, item.Dir.name]);
    }
  };

  const navigateUp = () => {
    setCurrentPath(currentPath.slice(0, -1));
  };

  // Start/stop mirroring for a directory (one app's downloads).
  const toggleMirroring = async (item: DownloadItem) => {
    if (item.Dir) {
      const packageId = [...currentPath, item.Dir.name].join(':');
      try {
        if (item.Dir.mirroring) {
          await stopMirroring(packageId);
        } else {
          await startMirroring(packageId);
        }
        await loadItems();
      } catch (error) {
        console.error("Error toggling mirroring:", error);
        setError(`Error toggling mirroring: ${error instanceof Error ? error.message : String(error)}`);
      }
    }
  };

  // First install step: parse the file's embedded manifest JSON and open the
  // capability-approval modal; actual install happens in confirmInstall.
  const handleInstall = async (item: DownloadItem) => {
    if (item.File) {
      setSelectedItem(item);
      try {
        const manifestData = JSON.parse(item.File.manifest);
        setManifest(manifestData);
        setShowCapApproval(true);
      } catch (error) {
        console.error('Failed to parse manifest:', error);
        setError(`Failed to parse manifest: ${error instanceof Error ? error.message : String(error)}`);
      }
    }
  };

  // Second install step (after capability approval). Assumes file names are
  // colon-separated with the version hash as the final `.zip` segment —
  // TODO confirm against the downloads process naming.
  const confirmInstall = async () => {
    if (!selectedItem?.File) return;
    setIsInstalling(true);
    setError(null);
    try {
      const fileName = selectedItem.File.name;
      const parts = fileName.split(':');
      const versionHash = parts.pop()?.replace('.zip', '');
      if (!versionHash) throw new Error('Invalid file name format');
      const packageId = [...currentPath, ...parts].join(':');
      await installApp(packageId, versionHash);
      await fetchInstalled();
      setShowCapApproval(false);
      await loadItems();
    } catch (error) {
      console.error('Installation failed:', error);
      setError(`Installation failed: ${error instanceof Error ? error.message : String(error)}`);
    } finally {
      setIsInstalling(false);
    }
  };

  // Delete a downloaded zip; here the whole file name (minus .zip) is used
  // as the version hash.
  const handleRemoveDownload = async (item: DownloadItem) => {
    if (item.File) {
      try {
        const packageId = currentPath.join(':');
        const versionHash = item.File.name.replace('.zip', '');
        await removeDownload(packageId, versionHash);
        await loadItems();
      } catch (error) {
        console.error('Failed to remove download:', error);
        setError(`Failed to remove download: ${error instanceof Error ? error.message : String(error)}`);
      }
    }
  };

  // True when any installed app's package name matches this file name
  // (publisher is not compared).
  const isAppInstalled = (name: string): boolean => {
    const packageName = name.replace('.zip', '');
    return Object.values(installed).some(app => app.package_id.package_name === packageName);
  };

  // Open the uninstall confirmation modal, refusing core packages.
  const initiateUninstall = (app: any) => {
    const packageId = `${app.package_id.package_name}:${app.package_id.publisher_node}`;
    if (CORE_PACKAGES.includes(packageId)) {
      setError("Cannot uninstall core system packages");
      return;
    }
    setAppToUninstall(app);
    setShowUninstallConfirm(true);
  };

  // Perform the confirmed uninstall, then refresh installed apps + listing.
  const handleUninstall = async () => {
    if (!appToUninstall) return;
    setIsUninstalling(true);
    const packageId = `${appToUninstall.package_id.package_name}:${appToUninstall.package_id.publisher_node}`;
    try {
      await uninstallApp(packageId);
      await fetchInstalled();
      await loadItems();
      setShowUninstallConfirm(false);
      setAppToUninstall(null);
    } catch (error) {
      console.error('Uninstallation failed:', error);
      setError(`Uninstallation failed: ${error instanceof Error ? error.message : String(error)}`);
    } finally {
      setIsUninstalling(false);
    }
  };

  return (
    <div className="my-apps-page">
      <div style={{ display: 'flex', justifyContent: 'space-between', alignItems: 'center', marginBottom: '2rem' }}>
        <h1>My Apps</h1>
        <ResetButton />
      </div>
      {error && <div className="error-message">{error}</div>}
      {renderUpdates()}
      {/* Navigation */}
      <div className="navigation">
        {currentPath.length > 0 && (
          <button onClick={() => setCurrentPath([])} className="nav-button">
            <FaChevronLeft /> Back
          </button>
        )}
        <div className="current-path">
          {currentPath.length === 0 ? 'Downloads' : currentPath.join('/')}
        </div>
      </div>
      {/* Items Table */}
      <div className="items-table-container">
        <div className="file-explorer">
          <h3>Installed Apps</h3>
          <table className="downloads-table">
            <thead>
              <tr>
                <th>Package ID</th>
                <th>Actions</th>
              </tr>
            </thead>
            <tbody>
              {Object.values(installed).map((app) => {
                const packageId = `${app.package_id.package_name}:${app.package_id.publisher_node}`;
                const isCore = CORE_PACKAGES.includes(packageId);
                return (
                  <tr key={packageId}>
                    <td>{packageId}</td>
                    <td>
                      {isCore ? (
                        <span className="core-package">Core Package</span>
                      ) : (
                        <button
                          onClick={() => initiateUninstall(app)}
                          disabled={isUninstalling}
                        >
                          {isUninstalling ? <FaSpinner className="fa-spin" /> : <FaTrash />}
                          Uninstall
                        </button>
                      )}
                    </td>
                  </tr>
                );
              })}
            </tbody>
          </table>
        </div>
        <div className="file-explorer">
          <h3>Downloads</h3>
          <div className="path-navigation">
            {currentPath.length > 0 && (
              <button onClick={navigateUp} className="navigate-up">
                <FaChevronLeft /> Back
              </button>
            )}
            <span className="current-path">/{currentPath.join('/')}</span>
          </div>
          <table className="downloads-table">
            <thead>
              <tr>
                <th>Name</th>
                <th>Type</th>
                <th>Size</th>
                <th>Mirroring</th>
                <th>Actions</th>
              </tr>
            </thead>
            <tbody>
              {items.map((item, index) => {
                const isFile = !!item.File;
                const name = isFile ? item.File!.name : item.Dir!.name;
                const isInstalled = isFile && isAppInstalled(name);
                return (
                  <tr key={index} onClick={() => navigateToItem(item)} className={isFile ? 'file' : 'directory'}>
                    <td>
                      {isFile ? <FaFile /> : <FaFolder />} {name}
                    </td>
                    <td>{isFile ? 'File' : 'Directory'}</td>
                    <td>{isFile ? `${(item.File!.size / 1024).toFixed(2)} KB` : '-'}</td>
                    <td>{!isFile && (item.Dir!.mirroring ? 'Yes' : 'No')}</td>
                    <td>
                      {!isFile && (
                        <button onClick={(e) => { e.stopPropagation(); toggleMirroring(item); }}>
                          <FaSync /> {item.Dir!.mirroring ? 'Stop' : 'Start'} Mirroring
                        </button>
                      )}
                      {isFile && !isInstalled && (
                        <>
                          <button onClick={(e) => { e.stopPropagation(); handleInstall(item); }}>
                            <FaRocket /> Install
                          </button>
                          <button onClick={(e) => { e.stopPropagation(); handleRemoveDownload(item); }}>
                            <FaTrash /> Delete
                          </button>
                        </>
                      )}
                      {isFile && isInstalled && (
                        <FaCheck className="installed" />
                      )}
                    </td>
                  </tr>
                );
              })}
            </tbody>
          </table>
        </div>
      </div>
      {/* Uninstall Confirmation Modal */}
      {showUninstallConfirm && appToUninstall && (
        <div className="cap-approval-popup">
          <div className="cap-approval-content">
            <h3>Confirm Uninstall</h3>
            <div className="warning-message">
              Are you sure you want to uninstall this app?
            </div>
            <div className="package-info">
              <strong>Package ID:</strong> {`${appToUninstall.package_id.package_name}:${appToUninstall.package_id.publisher_node}`}
            </div>
            {appToUninstall.metadata?.name && (
              <div className="package-info">
                <strong>Name:</strong> {appToUninstall.metadata.name}
              </div>
            )}
            <div className="approval-buttons">
              <button
                onClick={() => {
                  setShowUninstallConfirm(false);
                  setAppToUninstall(null);
                }}
              >
                Cancel
              </button>
              <button
                onClick={handleUninstall}
                disabled={isUninstalling}
                className="danger"
              >
                {isUninstalling ? <FaSpinner className="fa-spin" /> : 'Confirm Uninstall'}
              </button>
            </div>
          </div>
        </div>
      )}
      {showCapApproval && manifest && (
        <div className="cap-approval-popup">
          <div className="cap-approval-content">
            <h3>Approve Capabilities</h3>
            <pre className="json-display">
              {JSON.stringify(manifest[0]?.request_capabilities || [], null, 2)}
            </pre>
            <div className="approval-buttons">
              <button onClick={() => setShowCapApproval(false)}>Cancel</button>
              <button onClick={confirmInstall} disabled={isInstalling}>
                {isInstalling ? <FaSpinner className="fa-spin" /> : 'Approve and Install'}
              </button>
            </div>
          </div>
        </div>
      )}
    </div>
  );
}

View File

@ -5,14 +5,13 @@ import { DownloadItem, PackageManifest, PackageState } from "../types/Apps";
// Core packages that cannot be uninstalled
const CORE_PACKAGES = [
"app_store:sys",
"contacts:sys",
"kino_updates:sys",
"terminal:sys",
"app-store:sys",
"chess:sys",
"kns_indexer:sys",
"contacts:sys",
"homepage:sys",
"kns-indexer:sys",
"settings:sys",
"homepage:sys"
"terminal:sys",
];
export default function MyAppsPage() {

View File

@ -7,12 +7,13 @@ import { mechAbi, KIMAP, encodeIntoMintCall, encodeMulticalls, kimapAbi, MULTICA
import { kinohash } from '../utils/kinohash';
import useAppsStore from "../store";
import { PackageSelector } from "../components";
import { Tooltip } from '../components/Tooltip';
const NAME_INVALID = "Package name must contain only valid characters (a-z, 0-9, -, and .)";
export default function PublishPage() {
const { openConnectModal } = useConnectModal();
const { ourApps, fetchOurApps, downloads } = useAppsStore();
const { ourApps, fetchOurApps, downloads, fetchDownloadsForApp } = useAppsStore();
const publicClient = usePublicClient();
const { address, isConnected, isConnecting } = useAccount();
@ -23,6 +24,7 @@ export default function PublishPage() {
});
const [packageName, setPackageName] = useState<string>("");
// @ts-ignore
const [publisherId, setPublisherId] = useState<string>(window.our?.node || "");
const [metadataUrl, setMetadataUrl] = useState<string>("");
const [metadataHash, setMetadataHash] = useState<string>("");
@ -34,6 +36,26 @@ export default function PublishPage() {
fetchOurApps();
}, [fetchOurApps]);
useEffect(() => {
if (packageName && publisherId) {
const id = `${packageName}:${publisherId}`;
fetchDownloadsForApp(id);
}
}, [packageName, publisherId, fetchDownloadsForApp]);
useEffect(() => {
if (isConfirmed) {
// Fetch our apps again after successful publish
fetchOurApps();
// Reset form fields
setPackageName("");
// @ts-ignore
setPublisherId(window.our?.node || "");
setMetadataUrl("");
setMetadataHash("");
}
}, [isConfirmed, fetchOurApps]);
const validatePackageName = useCallback((name: string) => {
// Allow lowercase letters, numbers, hyphens, and dots
const validNameRegex = /^[a-z0-9.-]+$/;
@ -69,9 +91,12 @@ export default function PublishPage() {
// Check if code_hashes exist in metadata and is an object
if (metadata.properties && metadata.properties.code_hashes && typeof metadata.properties.code_hashes === 'object') {
const codeHashes = metadata.properties.code_hashes;
const missingHashes = Object.entries(codeHashes).filter(([version, hash]) =>
!downloads[`${packageName}:${publisherId}`]?.some(d => d.File?.name === `${hash}.zip`)
);
console.log('Available downloads:', downloads[`${packageName}:${publisherId}`]);
const missingHashes = Object.entries(codeHashes).filter(([version, hash]) => {
const hasDownload = downloads[`${packageName}:${publisherId}`]?.some(d => d.File?.name === `${hash}.zip`);
return !hasDownload;
});
if (missingHashes.length > 0) {
setMetadataError(`Missing local downloads for mirroring versions: ${missingHashes.map(([version]) => version).join(', ')}`);
@ -163,12 +188,6 @@ export default function PublishPage() {
gas: BigInt(1000000),
});
// Reset form fields
setPackageName("");
setPublisherId(window.our?.node || "");
setMetadataUrl("");
setMetadataHash("");
} catch (error) {
console.error(error);
}
@ -223,22 +242,31 @@ export default function PublishPage() {
return (
<div className="publish-page">
<h1>Publish Package</h1>
{Boolean(address) && (
<div className="publisher-info">
<span>Publishing as:</span>
<span className="address">{address?.slice(0, 4)}...{address?.slice(-4)}</span>
{!address ? (
<div className="wallet-status">
<button onClick={() => openConnectModal?.()}>Connect Wallet</button>
</div>
) : (
<div className="wallet-status">
Connected: {address.slice(0, 6)}...{address.slice(-4)}
<Tooltip content="Make sure the wallet you're connecting to publish is the same as the owner for the publisher!" />
</div>
)}
{isConfirming ? (
<div className="message info">Publishing package...</div>
<div className="message info">
<div className="loading-spinner"></div>
<span>Publishing package...</span>
</div>
) : !address || !isConnected ? (
<>
<div className="connect-wallet">
<h4>Please connect your wallet to publish a package</h4>
<ConnectButton />
</>
</div>
) : isConnecting ? (
<div className="message info">Approve connection in your wallet</div>
<div className="message info">
<div className="loading-spinner"></div>
<span>Approve connection in your wallet</span>
</div>
) : (
<form className="publish-form" onSubmit={publishPackage}>
<div className="form-group">
@ -248,33 +276,36 @@ export default function PublishPage() {
</div>
<div className="form-group">
<label htmlFor="metadata-url">Metadata URL</label>
<div style={{ display: 'flex', alignItems: 'center', gap: '4px' }}>
<label>Metadata URL</label>
<Tooltip content={<>add a link to metadata.json here (<a href="https://raw.githubusercontent.com/kinode-dao/kit/47cdf82f70b36f2a102ddfaaeed5efa10d7ef5b9/src/new/templates/rust/ui/chat/metadata.json" target="_blank" rel="noopener noreferrer">example link</a>)</>} />
</div>
<input
id="metadata-url"
type="text"
required
value={metadataUrl}
onChange={(e) => setMetadataUrl(e.target.value)}
onBlur={calculateMetadataHash}
placeholder="https://github/my-org/my-repo/metadata.json"
/>
<p className="help-text">
Metadata is a JSON file that describes your package.
</p>
{metadataError && <p className="error-message">{metadataError}</p>}
</div>
<div className="form-group">
<label htmlFor="metadata-hash">Metadata Hash</label>
<label>Metadata Hash</label>
<input
readOnly
id="metadata-hash"
type="text"
value={metadataHash}
placeholder="Calculated automatically from metadata URL"
/>
</div>
<button type="submit" disabled={isConfirming || nameValidity !== null}>
{isConfirming ? 'Publishing...' : 'Publish'}
<button type="submit" disabled={isConfirming || nameValidity !== null || Boolean(metadataError)}>
{isConfirming ? (
<>
<div className="loading-spinner small"></div>
<span>Publishing...</span>
</>
) : (
'Publish'
)}
</button>
</form>
)}
@ -293,21 +324,24 @@ export default function PublishPage() {
<div className="my-packages">
<h2>Packages You Own</h2>
{Object.keys(ourApps).length > 0 ? (
<ul>
<ul className="package-list">
{Object.values(ourApps).map((app) => (
<li key={`${app.package_id.package_name}:${app.package_id.publisher_node}`}>
<Link to={`/app/${app.package_id.package_name}:${app.package_id.publisher_node}`} className="app-name">
{app.metadata?.name || app.package_id.package_name}
{app.metadata?.image && (
<img src={app.metadata.image} alt="" className="package-icon" />
)}
<span>{app.metadata?.name || app.package_id.package_name}</span>
</Link>
<button onClick={() => unpublishPackage(app.package_id.package_name, app.package_id.publisher_node)}>
<button onClick={() => unpublishPackage(app.package_id.package_name, app.package_id.publisher_node)} className="danger">
Unpublish
</button>
</li>
))}
</ul>
) : (
<p>No packages published</p>
<p className="no-packages">No packages published</p>
)}
</div>
</div>

View File

@ -2,13 +2,15 @@ import React, { useState, useEffect } from "react";
import useAppsStore from "../store";
import { AppListing } from "../types/Apps";
import { Link } from "react-router-dom";
import { FaSearch } from "react-icons/fa";
export default function StorePage() {
const { listings, fetchListings } = useAppsStore();
const { listings, fetchListings, fetchUpdates } = useAppsStore();
const [searchQuery, setSearchQuery] = useState<string>("");
// Load listings and pending update notices on mount.
// BUGFIX: fetchUpdates was called but missing from the dependency array
// (react-hooks/exhaustive-deps); zustand actions are stable, so adding it
// does not cause extra runs.
useEffect(() => {
  fetchListings();
  fetchUpdates();
}, [fetchListings, fetchUpdates]);
// extensive temp null handling due to weird prod bug
@ -25,12 +27,15 @@ export default function StorePage() {
return (
<div className="store-page">
<div className="store-header">
<input
type="text"
placeholder="Search apps..."
value={searchQuery}
onChange={(e) => setSearchQuery(e.target.value)}
/>
<div className="search-bar">
<input
type="text"
placeholder="Search apps..."
value={searchQuery}
onChange={(e) => setSearchQuery(e.target.value)}
/>
<FaSearch />
</div>
</div>
<div className="app-list">
{!listings ? (

View File

@ -1,11 +1,11 @@
import { create } from 'zustand'
import { persist } from 'zustand/middleware'
import { PackageState, AppListing, MirrorCheckFile, DownloadItem, HomepageApp, ManifestResponse, Notification } from '../types/Apps'
import { PackageState, AppListing, MirrorCheckFile, DownloadItem, HomepageApp, ManifestResponse, Notification, UpdateInfo } from '../types/Apps'
import { HTTP_STATUS } from '../constants/http'
import KinodeClientApi from "@kinode/client-api"
import { WEBSOCKET_URL } from '../utils/ws'
const BASE_URL = '/main:app_store:sys'
const BASE_URL = '/main:app-store:sys'
interface AppsStore {
listings: Record<string, AppListing>
@ -16,6 +16,7 @@ interface AppsStore {
notifications: Notification[]
homepageApps: HomepageApp[]
activeDownloads: Record<string, { downloaded: number, total: number }>
updates: Record<string, UpdateInfo>
fetchData: (id: string) => Promise<void>
fetchListings: () => Promise<void>
@ -26,6 +27,7 @@ interface AppsStore {
fetchOurApps: () => Promise<void>
fetchDownloadsForApp: (id: string) => Promise<DownloadItem[]>
checkMirror: (node: string) => Promise<MirrorCheckFile | null>
resetStore: () => Promise<void>
fetchHomepageApps: () => Promise<void>
getLaunchUrl: (id: string) => string | null
@ -48,6 +50,8 @@ interface AppsStore {
clearActiveDownload: (appId: string) => void
clearAllActiveDownloads: () => void;
fetchUpdates: () => Promise<void>
clearUpdates: (packageId: string) => Promise<void>
}
const useAppsStore = create<AppsStore>()((set, get) => ({
@ -58,7 +62,7 @@ const useAppsStore = create<AppsStore>()((set, get) => ({
activeDownloads: {},
homepageApps: [],
notifications: [],
updates: {},
fetchData: async (id: string) => {
if (!id) return;
@ -380,10 +384,59 @@ const useAppsStore = create<AppsStore>()((set, get) => ({
});
},
// Pull the map of pending package updates from the app-store backend
// and replace local `updates` state. Non-OK responses are ignored;
// network errors are logged and swallowed.
fetchUpdates: async () => {
  try {
    const res = await fetch(`${BASE_URL}/updates`);
    if (res.status === HTTP_STATUS.OK) {
      const updates = await res.json();
      set({ updates });
    }
  } catch (error) {
    console.error("Error fetching updates:", error);
  }
},
// Ask the backend to clear recorded update info for one package, then
// drop it from local state. NOTE(review): the POST response status is
// not checked, so local state is cleared even if the backend refused.
clearUpdates: async (packageId: string) => {
  try {
    await fetch(`${BASE_URL}/updates/${packageId}/clear`, {
      method: 'POST',
    });
    set((state) => {
      // copy-then-delete so zustand sees a new object reference
      const newUpdates = { ...state.updates };
      delete newUpdates[packageId];
      return { updates: newUpdates };
    });
  } catch (error) {
    console.error("Error clearing updates:", error);
  }
},
// Hard-reset the backend store, then re-fetch installed apps, listings,
// and updates in parallel. Rethrows on failure so callers can surface
// the error in the UI.
resetStore: async () => {
  try {
    const response = await fetch(`${BASE_URL}/reset`, {
      method: 'POST',
    });
    if (!response.ok) {
      throw new Error('Reset failed');
    }
    // Refresh the store data
    await Promise.all([
      get().fetchInstalled(),
      get().fetchListings(),
      get().fetchUpdates(),
    ]);
  } catch (error) {
    console.error('Reset failed:', error);
    throw error;
  }
},
ws: new KinodeClientApi({
uri: WEBSOCKET_URL,
nodeId: (window as any).our?.node,
processId: "main:app_store:sys",
processId: "main:app-store:sys",
onMessage: (message) => {
console.log('WebSocket message received', message);
try {
@ -419,10 +472,26 @@ const useAppsStore = create<AppsStore>()((set, get) => ({
get().removeNotification(`download-${appId}`);
if (error) {
// Render a DownloadError payload as a human-readable string: hash
// mismatches get a short-hash summary, other objects are JSON-serialized,
// and everything else (including non-serializable objects) falls back to
// String().
const formatDownloadError = (error: any): string => {
  if (typeof error !== 'object' || error === null) {
    return String(error);
  }
  if ('HashMismatch' in error) {
    const { actual, desired } = error.HashMismatch;
    return `Hash mismatch: expected ${desired.slice(0, 8)}..., got ${actual.slice(0, 8)}...`;
  }
  try {
    return JSON.stringify(error);
  } catch {
    return String(error);
  }
};
get().addNotification({
id: `error-${appId}`,
type: 'error',
message: `Download failed for ${package_id.package_name}: ${error}`,
message: `Download failed for ${package_id.package_name}: ${formatDownloadError(error)}`,
timestamp: Date.now(),
});
} else {
@ -452,4 +521,4 @@ const useAppsStore = create<AppsStore>()((set, get) => ({
}),
}))
export default useAppsStore
export default useAppsStore

View File

@ -94,6 +94,35 @@ export interface HomepageApp {
favorite: boolean;
}
// Hash pair reported when a downloaded artifact fails its integrity check.
export interface HashMismatch {
  desired: string; // the hash we expected
  actual: string;  // the hash of the bytes actually received
}
// Mirrors the backend's DownloadError serialization: unit variants arrive
// as plain strings, data-carrying variants as single-key objects
// (e.g. { HashMismatch: {...} }, { HandlingError: "..." }).
export type DownloadError =
  | "NoPackage"
  | "NotMirroring"
  | { HashMismatch: HashMismatch }
  | "FileNotFound"
  | "WorkerSpawnFailed"
  | "HttpClientError"
  | "BlobNotFound"
  | "VfsError"
  | { HandlingError: string }
  | "Timeout"
  | "InvalidManifest"
  | "Offline";
// Status of one pending update attempt: errors accumulated per source,
// plus a manifest hash if one is pending.
export interface UpdateInfo {
  errors: [string, DownloadError][]; // [url/node, error]
  // NOTE(review): presumably the hash of a manifest awaiting approval —
  // confirm against the backend's /updates payload.
  pending_manifest_hash: string | null;
}
// Two-level map: package_id -> (version_hash -> UpdateInfo).
export type Updates = {
  [key: string]: { // package_id
    [key: string]: UpdateInfo; // version_hash -> update info
  };
};
export type NotificationActionType = 'click' | 'modal' | 'popup' | 'redirect';

View File

@ -1,5 +1,5 @@
// TODO: remove as much as possible of this..
const BASE_URL = "/main:app_store:sys/";
const BASE_URL = "/main:app-store:sys/";
if (window.our) window.our.process = BASE_URL?.replace("/", "");
@ -8,4 +8,4 @@ export const PROXY_TARGET = `${(import.meta.env.VITE_NODE_URL || `http://localho
// This env also has BASE_URL which should match the process + package name
export const WEBSOCKET_URL = import.meta.env.DEV
? `${PROXY_TARGET.replace('http', 'ws')}`
: undefined;
: undefined;

View File

@ -14,7 +14,7 @@ IMPORTANT:
This must match the process name from pkg/manifest.json + pkg/metadata.json
The format is "/" + "process_name:package_name:publisher_node"
*/
const BASE_URL = `/main:app_store:sys`;
const BASE_URL = `/main:app-store:sys`;
// This is the proxy URL, it must match the node you are developing against
const PROXY_URL = (process.env.VITE_NODE_URL || 'http://localhost:8080').replace(/\/$/, '');

View File

@ -0,0 +1,21 @@
[package]
name = "uninstall"
version = "0.1.0"
edition = "2021"

[features]
# Feature flag shared across app-store processes; gates local-test-chain
# constants where used.
simulation-mode = []

[dependencies]
anyhow = "1.0"
kinode_process_lib = "0.10.0"
process_macros = "0.1"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
wit-bindgen = "0.36.0"

[lib]
# Built as a cdylib so it can be packaged as a Wasm component.
crate-type = ["cdylib"]

[package.metadata.component]
package = "kinode:process"

View File

@ -1,8 +1,8 @@
//! uninstall:app_store:sys
//! uninstall:app-store:sys
//! terminal script for uninstalling apps from the app store.
//!
//! Usage:
//! uninstall:app_store:sys <package_id>
//! uninstall:app-store:sys <package_id>
//!
//! Arguments:
//! <package_id> The package ID of the app (e.g., app:publisher.os)
@ -41,7 +41,7 @@ fn init(our: Address) {
};
let Ok(Ok(Message::Response { body, .. })) =
Request::to((our.node(), ("main", "app_store", "sys")))
Request::to((our.node(), ("main", "app-store", "sys")))
.body(LocalRequest::Uninstall(
crate::kinode::process::main::PackageId {
package_name: package_id.package_name.clone(),
@ -50,12 +50,12 @@ fn init(our: Address) {
))
.send_and_await_response(5)
else {
println!("uninstall: failed to get a response from app_store..!");
println!("uninstall: failed to get a response from app-store..!");
return;
};
let Ok(response) = body.try_into() else {
println!("uninstall: failed to parse response from app_store..!");
println!("uninstall: failed to parse response from app-store..!");
return;
};
@ -67,7 +67,7 @@ fn init(our: Address) {
println!("failed to uninstall package {package_id}!");
}
_ => {
println!("uninstall: unexpected response from app_store..!");
println!("uninstall: unexpected response from app-store..!");
return;
}
}

View File

@ -1,587 +0,0 @@
#![feature(let_chains)]
//! chain:app_store:sys
//! This process manages the on-chain interactions for the App Store system in the Kinode ecosystem.
//! It is responsible for indexing and tracking app metadata stored on the blockchain.
//!
//! ## Responsibilities:
//!
//! 1. Index and track app metadata from the blockchain.
//! 2. Manage subscriptions to relevant blockchain events.
//! 3. Provide up-to-date information about available apps and their metadata.
//! 4. Handle auto-update settings for apps.
//!
//! ## Key Components:
//!
//! - `handle_eth_log`: Processes blockchain events related to app metadata updates.
//! - `fetch_and_subscribe_logs`: Initializes and maintains blockchain event subscriptions.
//!
//! ## Interaction Flow:
//!
//! 1. The process subscribes to relevant blockchain events on startup.
//! 2. When new events are received, they are processed to update the local state.
//! 3. Other processes (like main) can request information about apps.
//! 4. The chain process responds with the most up-to-date information from its local state.
//!
//! Note: This process does not handle app binaries or installation. It focuses solely on
//! metadata management and providing information about available apps.
//!
use crate::kinode::process::chain::{
ChainError, ChainRequests, OnchainApp, OnchainMetadata, OnchainProperties,
};
use crate::kinode::process::downloads::{AutoUpdateRequest, DownloadRequests};
use alloy_primitives::keccak256;
use alloy_sol_types::SolEvent;
use kinode::process::chain::ChainResponses;
use kinode_process_lib::{
await_message, call_init, eth, get_blob, get_state, http, kernel_types as kt, kimap,
print_to_terminal, println, timer, Address, Message, PackageId, Request, Response,
};
use serde::{Deserialize, Serialize};
use std::{
collections::{HashMap, HashSet},
str::FromStr,
};
wit_bindgen::generate!({
path: "target/wit",
generate_unused_types: true,
world: "app-store-sys-v1",
additional_derives: [serde::Deserialize, serde::Serialize, process_macros::SerdeJsonInto],
});
#[cfg(not(feature = "simulation-mode"))]
const CHAIN_ID: u64 = kimap::KIMAP_CHAIN_ID;
#[cfg(feature = "simulation-mode")]
const CHAIN_ID: u64 = 31337; // local
const CHAIN_TIMEOUT: u64 = 60; // 60s
#[cfg(not(feature = "simulation-mode"))]
const KIMAP_ADDRESS: &'static str = kimap::KIMAP_ADDRESS; // optimism
#[cfg(feature = "simulation-mode")]
const KIMAP_ADDRESS: &str = "0x9CE8cCD2932DC727c70f9ae4f8C2b68E6Abed58C";
const DELAY_MS: u64 = 1_000; // 1s
/// Persisted state of the chain indexer; serialized to and restored from
/// process state (see `fetch_state`).
#[derive(Debug, Serialize, Deserialize)]
pub struct State {
    /// the kimap helper we are using
    pub kimap: kimap::Kimap,
    /// the last block at which we saved the state of the listings to disk.
    /// when we boot, we can read logs starting from this block and
    /// rebuild latest state.
    pub last_saved_block: u64,
    /// onchain listings
    pub listings: HashMap<PackageId, PackageListing>,
    /// set of packages that we have published
    pub published: HashSet<PackageId>,
}
/// listing information derived from metadata hash in listing event
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct PackageListing {
    /// address of this listing's kimap entry, as returned by `kimap.get`
    pub tba: eth::Address,
    /// URI where the metadata JSON is hosted (posted as ~metadata-uri)
    pub metadata_uri: String,
    /// keccak-256 hash (from ~metadata-hash) the fetched metadata must match
    pub metadata_hash: String,
    // should this even be optional?
    // relegate to only valid apps maybe?
    pub metadata: Option<kt::Erc721Metadata>,
    /// when true, new metadata postings trigger a downloads auto-update
    pub auto_update: bool,
}
/// All request bodies this process accepts: ETH subscription results
/// relayed by eth:distro:sys, or ChainRequests from local processes.
/// Deserialized by shape, not by variant name, hence `untagged`.
#[derive(Debug, Serialize, Deserialize, process_macros::SerdeJsonInto)]
#[serde(untagged)] // untagged as a meta-type for all incoming requests
pub enum Req {
    Eth(eth::EthSubResult),
    Request(ChainRequests),
}
call_init!(init);
/// Entry point: load (or rebuild) indexer state, backfill and subscribe to
/// kimap logs, then loop forever servicing messages. Errors from individual
/// messages are logged at verbosity 1 and never abort the loop.
fn init(our: Address) {
    println!(
        "chain started, indexing on contract address {}",
        KIMAP_ADDRESS
    );
    // create new provider with request-timeout of 60s
    // can change, log requests can take quite a long time.
    let eth_provider: eth::Provider = eth::Provider::new(CHAIN_ID, CHAIN_TIMEOUT);

    let mut state = fetch_state(eth_provider);
    fetch_and_subscribe_logs(&our, &mut state);

    loop {
        match await_message() {
            Err(send_error) => {
                print_to_terminal(1, &format!("chain: got network error: {send_error}"));
            }
            Ok(message) => {
                if let Err(e) = handle_message(&our, &mut state, &message) {
                    print_to_terminal(1, &format!("chain: error handling message: {:?}", e));
                }
            }
        }
    }
}
/// Dispatch one incoming message.
///
/// Requests are either ETH subscription events (relayed from
/// eth:distro:sys, which we defer via a timer so kns can process the block
/// first) or local `ChainRequests`. The only response we act on is the
/// timer:distro:sys callback, whose context carries the deferred log.
///
/// BUGFIX: the missing-context error was the placeholder `anyhow!("foo")`;
/// it now carries a descriptive message.
fn handle_message(our: &Address, state: &mut State, message: &Message) -> anyhow::Result<()> {
    if !message.is_request() {
        if message.is_local(&our) && message.source().process == "timer:distro:sys" {
            // handling of ETH RPC subscriptions delayed by DELAY_MS
            // to allow kns to have a chance to process block: handle now
            let Some(context) = message.context() else {
                return Err(anyhow::anyhow!(
                    "timer response is missing its eth log context"
                ));
            };
            let log = serde_json::from_slice(context)?;
            handle_eth_log(our, state, log, false)?;
            return Ok(());
        }
    } else {
        match message.body().try_into()? {
            Req::Eth(eth_result) => {
                // only the local ETH provider may deliver subscription events
                if !message.is_local(our) || message.source().process != "eth:distro:sys" {
                    return Err(anyhow::anyhow!(
                        "eth sub event from unexpected address: {}",
                        message.source()
                    ));
                }
                if let Ok(eth::EthSub { result, .. }) = eth_result {
                    if let eth::SubscriptionResult::Log(ref log) = result {
                        // delay handling of ETH RPC subscriptions by DELAY_MS
                        // to allow kns to have a chance to process block
                        timer::set_timer(DELAY_MS, Some(serde_json::to_vec(log)?));
                    }
                } else {
                    // subscription lapsed: attempt to resubscribe
                    state
                        .kimap
                        .provider
                        .subscribe_loop(1, app_store_filter(state));
                }
            }
            Req::Request(chains) => {
                handle_local_request(state, chains)?;
            }
        }
    }
    Ok(())
}
/// Serve one local ChainRequests message, replying with the matching
/// ChainResponses variant. Exactly one Response is sent per request.
fn handle_local_request(state: &mut State, req: ChainRequests) -> anyhow::Result<()> {
    let response = match req {
        ChainRequests::GetApp(package_id) => {
            // look up by the process_lib PackageId; echo back the WIT one
            let found = state
                .listings
                .get(&package_id.clone().to_process_lib())
                .map(|listing| OnchainApp {
                    package_id,
                    tba: listing.tba.to_string(),
                    metadata_uri: listing.metadata_uri.clone(),
                    metadata_hash: listing.metadata_hash.clone(),
                    metadata: listing.metadata.as_ref().map(|m| m.clone().into()),
                    auto_update: listing.auto_update,
                });
            ChainResponses::GetApp(found)
        }
        ChainRequests::GetApps => ChainResponses::GetApps(
            state
                .listings
                .iter()
                .map(|(id, listing)| listing.to_onchain_app(id))
                .collect(),
        ),
        ChainRequests::GetOurApps => ChainResponses::GetOurApps(
            // only packages we've published that still have a listing
            state
                .published
                .iter()
                .filter_map(|id| state.listings.get(id).map(|l| l.to_onchain_app(id)))
                .collect(),
        ),
        ChainRequests::StartAutoUpdate(package_id) => {
            match state.listings.get_mut(&package_id.to_process_lib()) {
                Some(listing) => {
                    listing.auto_update = true;
                    ChainResponses::AutoUpdateStarted
                }
                None => ChainResponses::Err(ChainError::NoPackage),
            }
        }
        ChainRequests::StopAutoUpdate(package_id) => {
            match state.listings.get_mut(&package_id.to_process_lib()) {
                Some(listing) => {
                    listing.auto_update = false;
                    ChainResponses::AutoUpdateStopped
                }
                None => ChainResponses::Err(ChainError::NoPackage),
            }
        }
    };
    Response::new().body(&response).send()?;
    Ok(())
}
/// Ingest one kimap ~metadata-uri Note event.
///
/// `startup == true` means we are replaying historical logs: the
/// ~metadata-hash lookup and metadata fetch are skipped (done in one batch
/// afterwards by `update_all_metadata`) and auto-updates are never
/// triggered. `startup == false` is the live-subscription path.
fn handle_eth_log(
    our: &Address,
    state: &mut State,
    log: eth::Log,
    startup: bool,
) -> anyhow::Result<()> {
    let block_number: u64 = log
        .block_number
        .ok_or(anyhow::anyhow!("log missing block number"))?;
    let Ok(note) = kimap::decode_note_log(&log) else {
        // ignore invalid logs here -- they're not actionable
        return Ok(());
    };
    // the note's parent path is "<package>.<publisher>"
    let package_id = note
        .parent_path
        .split_once('.')
        .ok_or(anyhow::anyhow!("invalid publisher name"))
        .and_then(|(package, publisher)| {
            if package.is_empty() || publisher.is_empty() {
                Err(anyhow::anyhow!("invalid publisher name"))
            } else {
                Ok(PackageId::new(&package, &publisher))
            }
        })?;

    // the app store exclusively looks for ~metadata-uri postings: if one is
    // observed, we then *query* for ~metadata-hash to verify the content
    // at the URI.
    let metadata_uri = String::from_utf8_lossy(&note.data).to_string();
    let is_our_package = &package_id.publisher() == &our.node();

    let (tba, metadata_hash) = if !startup {
        // generate ~metadata-hash full-path
        let hash_note = format!("~metadata-hash.{}", note.parent_path);

        // owner can change which we don't track (yet?) so don't save, need to get when desired
        let (tba, _owner, data) = match state.kimap.get(&hash_note) {
            Ok(gr) => Ok(gr),
            Err(e) => match e {
                eth::EthError::RpcError(_) => {
                    // retry on RpcError after DELAY_MS sleep
                    // sleep here rather than with, e.g., a message to
                    // `timer:distro:sys` so that events are processed in
                    // order of receipt
                    std::thread::sleep(std::time::Duration::from_millis(DELAY_MS));
                    state.kimap.get(&hash_note)
                }
                _ => Err(e),
            },
        }
        .map_err(|e| anyhow::anyhow!("Couldn't find {hash_note}: {e:?}"))?;

        match data {
            None => {
                // if ~metadata-uri is also empty, this is an unpublish action!
                if metadata_uri.is_empty() {
                    state.published.remove(&package_id);
                    state.listings.remove(&package_id);
                    return Ok(());
                }
                return Err(anyhow::anyhow!(
                    "metadata hash not found: {package_id}, {metadata_uri}"
                ));
            }
            Some(hash_note) => (tba, String::from_utf8_lossy(&hash_note).to_string()),
        }
    } else {
        // startup replay: placeholders, filled in later by update_all_metadata
        (eth::Address::ZERO, String::new())
    };

    if is_our_package {
        state.published.insert(package_id.clone());
    }

    // if this is a startup event, we don't need to fetch metadata from the URI --
    // we'll loop over all listings after processing all logs and fetch them as needed.
    // fetch metadata from the URI (currently only handling HTTP(S) URLs!)
    // assert that the metadata hash matches the fetched data
    let metadata = if !startup {
        Some(fetch_metadata_from_url(&metadata_uri, &metadata_hash, 30)?)
    } else {
        None
    };

    // upsert the listing; auto_update is preserved on update, off by default
    match state.listings.entry(package_id.clone()) {
        std::collections::hash_map::Entry::Occupied(mut listing) => {
            let listing = listing.get_mut();
            listing.metadata_uri = metadata_uri;
            listing.tba = tba;
            listing.metadata_hash = metadata_hash;
            listing.metadata = metadata.clone();
        }
        std::collections::hash_map::Entry::Vacant(listing) => {
            listing.insert(PackageListing {
                tba,
                metadata_uri,
                metadata_hash,
                metadata: metadata.clone(),
                auto_update: false,
            });
        }
    }

    if !startup {
        // if auto_update is enabled, send a message to downloads to kick off the update.
        if let Some(listing) = state.listings.get(&package_id) {
            if listing.auto_update {
                print_to_terminal(0, &format!("kicking off auto-update for: {}", package_id));
                Request::to(("our", "downloads", "app_store", "sys"))
                    .body(&DownloadRequests::AutoUpdate(AutoUpdateRequest {
                        package_id: crate::kinode::process::main::PackageId::from_process_lib(
                            package_id,
                        ),
                        // metadata is always Some on the !startup path (fetched above)
                        metadata: metadata.unwrap().into(),
                    }))
                    .send()
                    .unwrap();
            }
        }
    }

    state.last_saved_block = block_number;
    Ok(())
}
/// after startup, fetch metadata for all listings
/// we do this as a separate step to not repeatedly fetch outdated metadata
/// as we process logs.
///
/// Returning `false` from the `retain` closure drops any listing whose
/// ~metadata-hash entry can no longer be read on chain; an empty hash entry
/// is treated as an unpublish.
fn update_all_metadata(state: &mut State) {
    state.listings.retain(|package_id, listing| {
        let (tba, metadata_hash) = {
            // generate ~metadata-hash full-path
            let hash_note = format!(
                "~metadata-hash.{}.{}",
                package_id.package(),
                package_id.publisher()
            );
            // owner can change which we don't track (yet?) so don't save, need to get when desired
            let Ok((tba, _owner, data)) = (match state.kimap.get(&hash_note) {
                Ok(gr) => Ok(gr),
                Err(e) => match e {
                    eth::EthError::RpcError(_) => {
                        // retry on RpcError after DELAY_MS sleep
                        // sleep here rather than with, e.g., a message to
                        // `timer:distro:sys` so that events are processed in
                        // order of receipt
                        std::thread::sleep(std::time::Duration::from_millis(DELAY_MS));
                        state.kimap.get(&hash_note)
                    }
                    _ => Err(e),
                },
            }) else {
                return false;
            };
            match data {
                None => {
                    // if ~metadata-uri is also empty, this is an unpublish action!
                    if listing.metadata_uri.is_empty() {
                        state.published.remove(package_id);
                    }
                    return false;
                }
                Some(hash_note) => (tba, String::from_utf8_lossy(&hash_note).to_string()),
            }
        };
        listing.tba = tba;
        listing.metadata_hash = metadata_hash;
        let metadata =
            fetch_metadata_from_url(&listing.metadata_uri, &listing.metadata_hash, 30).ok();
        listing.metadata = metadata.clone();
        if listing.auto_update {
            // BUGFIX: previously this unconditionally `unwrap()`ed `metadata`,
            // panicking whenever the fetch above failed (`.ok()` -> None).
            // Only kick off an auto-update when fresh metadata is in hand.
            if let Some(metadata) = metadata {
                print_to_terminal(0, &format!("kicking off auto-update for: {}", package_id));
                Request::to(("our", "downloads", "app_store", "sys"))
                    .body(&DownloadRequests::AutoUpdate(AutoUpdateRequest {
                        package_id: crate::kinode::process::main::PackageId::from_process_lib(
                            package_id.clone(),
                        ),
                        metadata: metadata.into(),
                    }))
                    .send()
                    .unwrap();
            } else {
                print_to_terminal(
                    1,
                    &format!("skipping auto-update for {package_id}: metadata fetch failed"),
                );
            }
        }
        true
    });
}
/// Build the getLogs/subscription filter for the app store: we watch only
/// kimap Note events whose label is ~metadata-uri, on our kimap contract.
/// When one fires we separately *query* ~metadata-hash to verify the
/// content behind the URI — so ~metadata-hash should be *posted before or
/// at the same time* as ~metadata-uri!
pub fn app_store_filter(state: &State) -> eth::Filter {
    let metadata_uri_label = keccak256("~metadata-uri");
    eth::Filter::new()
        .address(*state.kimap.address())
        .events([kimap::contract::Note::SIGNATURE])
        .topic3(vec![metadata_uri_label])
}
/// create a filter to fetch app store event logs from chain and subscribe to new events
pub fn fetch_and_subscribe_logs(our: &Address, state: &mut State) {
    let filter = app_store_filter(state);
    // get past logs, subscribe to new ones.
    // subscribe first so we don't miss any logs
    println!("subscribing...");
    state.kimap.provider.subscribe_loop(1, filter.clone());
    // replay history starting at the last persisted block; startup=true
    // defers hash lookups and metadata fetches to update_all_metadata below
    for log in fetch_logs(
        &state.kimap.provider,
        &filter.from_block(state.last_saved_block),
    ) {
        if let Err(e) = handle_eth_log(our, state, log, true) {
            print_to_terminal(1, &format!("error ingesting log: {e}"));
        };
    }
    update_all_metadata(state);
}
/// Fetch all logs matching `filter`, retrying every 5 seconds until the
/// provider answers successfully (blocks the process while retrying).
fn fetch_logs(eth_provider: &eth::Provider, filter: &eth::Filter) -> Vec<eth::Log> {
    let retry_delay = std::time::Duration::from_secs(5);
    loop {
        if let Ok(logs) = eth_provider.get_logs(filter) {
            return logs;
        }
        println!("failed to fetch logs! trying again in 5s...");
        std::thread::sleep(retry_delay);
    }
}
/// fetch metadata from url and verify it matches metadata_hash
///
/// IMPROVED: the original collapsed every failure (bad URL, HTTP error,
/// missing body, parse error) into a generic "metadata not found" — each
/// failure mode now produces a distinct, diagnosable error.
pub fn fetch_metadata_from_url(
    metadata_url: &str,
    metadata_hash: &str,
    timeout: u64,
) -> Result<kt::Erc721Metadata, anyhow::Error> {
    let url = url::Url::parse(metadata_url)
        .map_err(|e| anyhow::anyhow!("invalid metadata url {metadata_url}: {e}"))?;
    http::client::send_request_await_response(http::Method::GET, url, None, timeout, vec![])
        .map_err(|e| anyhow::anyhow!("failed to fetch metadata from {metadata_url}: {e:?}"))?;
    let Some(body) = get_blob() else {
        return Err(anyhow::anyhow!("metadata response had no body"));
    };
    // verify the content against the on-chain hash before trusting it
    let hash = keccak_256_hash(&body.bytes);
    if hash != metadata_hash {
        return Err(anyhow::anyhow!("metadata hash mismatch"));
    }
    serde_json::from_slice::<kt::Erc721Metadata>(&body.bytes)
        .map_err(|e| anyhow::anyhow!("metadata failed to parse as Erc721Metadata: {e}"))
}
/// generate a Keccak-256 hash string (with 0x prefix) of the metadata bytes
pub fn keccak_256_hash(bytes: &[u8]) -> String {
    use sha3::{Digest, Keccak256};
    // one-shot digest; the output array formats as lowercase hex
    format!("0x{:x}", Keccak256::digest(bytes))
}
/// fetch state from disk or create a new one if that fails
///
/// Saved state is discarded (and rebuilt from block 0) when it fails to
/// deserialize or was indexed against a different kimap contract address.
pub fn fetch_state(provider: eth::Provider) -> State {
    let saved = get_state().and_then(|bytes| match serde_json::from_slice::<State>(&bytes) {
        Ok(state) => Some(state),
        Err(e) => {
            println!("failed to deserialize saved state, rebuilding: {e}");
            None
        }
    });
    if let Some(state) = saved {
        let saved_address = state.kimap.address().to_string();
        if saved_address == KIMAP_ADDRESS {
            return state;
        }
        println!(
            "state contract address mismatch. rebuilding state! expected {}, got {}",
            KIMAP_ADDRESS, saved_address
        );
    }
    // fresh state: index from genesis of our filter range
    State {
        kimap: kimap::Kimap::new(provider, eth::Address::from_str(KIMAP_ADDRESS).unwrap()),
        last_saved_block: 0,
        listings: HashMap::new(),
        published: HashSet::new(),
    }
}
// quite annoyingly, we must convert from our gen'd version of PackageId
// to the process_lib's gen'd version. this is in order to access custom
// Impls that we want to use
impl crate::kinode::process::main::PackageId {
    /// Convert this WIT-generated PackageId into process_lib's PackageId.
    pub fn to_process_lib(self) -> PackageId {
        PackageId {
            package_name: self.package_name,
            publisher_node: self.publisher_node,
        }
    }

    /// Build the WIT-generated PackageId from process_lib's PackageId.
    pub fn from_process_lib(package_id: PackageId) -> Self {
        Self {
            package_name: package_id.package_name,
            publisher_node: package_id.publisher_node,
        }
    }
}
impl PackageListing {
    /// Project this listing into the WIT-generated OnchainApp shape for
    /// responses to other processes.
    pub fn to_onchain_app(&self, package_id: &PackageId) -> OnchainApp {
        OnchainApp {
            package_id: crate::kinode::process::main::PackageId::from_process_lib(
                package_id.clone(),
            ),
            tba: self.tba.to_string(),
            metadata_uri: self.metadata_uri.clone(),
            metadata_hash: self.metadata_hash.clone(),
            metadata: self.metadata.as_ref().map(|m| m.clone().into()),
            auto_update: self.auto_update,
        }
    }
}
/// Field-by-field conversion from process_lib's ERC-721 metadata into the
/// WIT-generated OnchainMetadata.
impl From<kt::Erc721Metadata> for OnchainMetadata {
    fn from(erc: kt::Erc721Metadata) -> Self {
        OnchainMetadata {
            name: erc.name,
            description: erc.description,
            image: erc.image,
            external_url: erc.external_url,
            animation_url: erc.animation_url,
            properties: OnchainProperties {
                package_name: erc.properties.package_name,
                publisher: erc.properties.publisher,
                current_version: erc.properties.current_version,
                mirrors: erc.properties.mirrors,
                // collection-shape conversion between the two generated types
                code_hashes: erc.properties.code_hashes.into_iter().collect(),
                license: erc.properties.license,
                screenshots: erc.properties.screenshots,
                wit_version: erc.properties.wit_version,
                dependencies: erc.properties.dependencies,
            },
        }
    }
}

View File

@ -1,21 +0,0 @@
[package]
name = "download"
version = "0.1.0"
edition = "2021"

[features]
# Feature flag shared across app-store processes for local test deployments.
simulation-mode = []

[dependencies]
anyhow = "1.0"
kinode_process_lib = "0.9.4"
# NOTE(review): pinned to a git revision rather than a published version.
process_macros = { git = "https://github.com/kinode-dao/process_macros", rev = "626e501" }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
wit-bindgen = "0.24.0"

[lib]
# Built as a cdylib so it can be packaged as a Wasm component.
crate-type = ["cdylib"]

[package.metadata.component]
package = "kinode:process"

View File

@ -1 +0,0 @@
../../ft_worker/src/ft_worker_lib.rs

View File

@ -1,21 +0,0 @@
[package]
name = "install"
version = "0.1.0"
edition = "2021"

[features]
# Feature flag shared across app-store processes for local test deployments.
simulation-mode = []

[dependencies]
anyhow = "1.0"
kinode_process_lib = "0.9.4"
# NOTE(review): pinned to a git revision rather than a published version.
process_macros = { git = "https://github.com/kinode-dao/process_macros", rev = "626e501" }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
wit-bindgen = "0.24.0"

[lib]
# Built as a cdylib so it can be packaged as a Wasm component.
crate-type = ["cdylib"]

[package.metadata.component]
package = "kinode:process"

View File

@ -1,716 +0,0 @@
/* Base styles */
body {
font-family: var(--font-family-main);
line-height: 1.6;
color: light-dark(var(--off-black), var(--off-white));
background-color: light-dark(var(--tan), var(--tasteful-dark));
}
/* Layout */
.app-content {
max-width: 1000px;
margin: 0 auto;
padding: 1rem;
}
/* Typography */
h1,
h2,
h3,
h4,
h5,
h6 {
color: var(--orange);
margin-bottom: 1rem;
}
a {
color: var(--blue);
text-decoration: none;
}
a:hover {
text-decoration: underline;
}
/* Header */
.app-header {
background-color: light-dark(var(--off-white), var(--off-black));
padding: 1rem;
margin-bottom: 1rem;
display: flex;
justify-content: space-between;
align-items: center;
}
.header-left {
display: flex;
align-items: center;
gap: 1rem;
}
.header-right {
display: flex;
align-items: center;
gap: 1rem;
/* Provides consistent spacing between NotificationBay and ConnectButton */
}
.header-left h1 {
margin: 0;
font-size: 1.5rem;
}
.header-left nav {
display: flex;
gap: 1rem;
}
.header-left nav a {
color: var(--orange);
text-decoration: none;
padding: 0.5rem;
border-radius: var(--border-radius);
}
.header-left nav a:hover,
.header-left nav a.active {
background-color: var(--orange);
color: var(--white);
}
/* Forms */
form {
display: flex;
flex-direction: column;
gap: 1rem;
max-width: 500px;
}
.form-group {
display: flex;
flex-direction: column;
margin-bottom: 1rem;
}
label {
margin-bottom: 0.5rem;
}
input,
select {
padding: 0.5rem;
border: 1px solid var(--gray);
border-radius: var(--border-radius);
background-color: light-dark(var(--white), var(--tasteful-dark));
color: light-dark(var(--off-black), var(--off-white));
}
/* Buttons */
button {
padding: 0.5rem 1rem;
background-color: var(--orange);
color: var(--white);
border: none;
border-radius: var(--border-radius);
cursor: pointer;
}
button:hover {
background-color: var(--dark-orange);
}
button:disabled {
opacity: 0.5;
cursor: not-allowed;
}
/* Tables */
table {
width: 100%;
border-collapse: collapse;
margin-bottom: 1rem;
}
th,
td {
padding: 0.5rem;
text-align: left;
border-bottom: 1px solid var(--gray);
}
/* App Icon */
.app-icon {
width: 64px;
height: 64px;
min-width: 64px;
min-height: 64px;
object-fit: cover;
border-radius: var(--border-radius);
}
/* Messages */
.message {
padding: 1rem;
border-radius: var(--border-radius);
margin-bottom: 1rem;
}
.message.error {
background-color: var(--red);
color: var(--white);
}
.message.success {
background-color: var(--green);
color: var(--white);
}
.message.info {
background-color: var(--blue);
color: var(--white);
}
/* Publisher Info */
.publisher-info {
display: flex;
align-items: center;
gap: 0.5rem;
margin-bottom: 1rem;
}
.address {
font-family: monospace;
background-color: var(--gray);
padding: 0.25rem 0.5rem;
border-radius: var(--border-radius);
}
/* Help Text */
.help-text {
font-size: 0.9rem;
color: var(--gray);
margin-top: 0.25rem;
}
/* Status Icons */
.status-icon {
display: inline-flex;
align-items: center;
}
.installed {
color: var(--green);
}
.not-installed {
color: var(--red);
}
/* App Title */
.app-title {
display: flex;
flex-direction: column;
}
.app-title-container {
display: flex;
align-items: center;
gap: 1rem;
}
.app-id {
font-size: 0.9rem;
color: var(--gray);
}
/* Detail List */
.detail-list {
list-style-type: none;
padding: 0;
}
.detail-list li {
display: flex;
justify-content: space-between;
margin-bottom: 0.5rem;
}
/* Error Message */
.error-message {
display: flex;
align-items: center;
gap: 0.5rem;
color: var(--red);
font-size: 0.9rem;
margin-top: 0.25rem;
}
/* App Page and Download Page shared styles */
.app-page,
.downloads-page {
background-color: light-dark(var(--white), var(--maroon));
border-radius: var(--border-radius);
padding: 2rem;
width: 100%;
}
.app-header {
display: flex;
align-items: center;
gap: 1rem;
margin-bottom: 1.5rem;
}
.app-description {
margin-bottom: 2rem;
line-height: 1.6;
}
.app-info {
background-color: light-dark(var(--tan), var(--tasteful-dark));
border-radius: var(--border-radius);
padding: 1.5rem;
margin-bottom: 2rem;
}
/* Download Page specific styles */
.download-section {
display: flex;
flex-direction: column;
gap: 1rem;
margin-bottom: 2rem;
max-width: 20rem;
}
.version-selector,
.mirror-selector select {
width: 100%;
padding: 0.5em;
border: 1px solid var(--gray);
border-radius: var(--border-radius);
background-color: light-dark(var(--white), var(--tasteful-dark));
color: light-dark(var(--off-black), var(--off-white));
}
/* Action Buttons */
.action-button,
.primary,
.secondary {
display: inline-flex;
align-items: center;
justify-content: center;
gap: 0.5rem;
padding: 0.75em 1em;
font-size: 1rem;
border: none;
border-radius: var(--border-radius);
cursor: pointer;
transition: background-color 0.3s ease, color 0.3s ease;
}
.primary {
background-color: var(--orange);
color: var(--white);
}
.primary:hover:not(:disabled) {
background-color: var(--dark-orange);
color: var(--white);
}
.secondary {
background-color: light-dark(var(--off-white), var(--off-black));
color: var(--orange);
border: 2px solid var(--orange);
}
.secondary:hover:not(:disabled) {
background-color: var(--orange);
color: var(--white);
}
.action-button:disabled,
.primary:disabled,
.secondary:disabled {
opacity: 0.5;
cursor: not-allowed;
}
/* App actions */
.app-actions {
display: flex;
gap: 1rem;
flex-wrap: wrap;
margin-bottom: 2rem;
}
/* Screenshots */
.app-screenshots {
margin-top: 2rem;
}
.screenshot-container {
display: flex;
gap: 1rem;
overflow-x: auto;
padding-bottom: 1rem;
}
.home-button {
min-width: 48px;
min-height: 48px;
width: 48px;
height: 48px;
}
.app-screenshot {
max-width: 200px;
height: auto;
border-radius: var(--border-radius);
}
/* Capabilities approval popup */
.cap-approval-popup {
position: fixed;
top: 0;
left: 0;
width: 100%;
height: 100%;
background-color: rgba(0, 0, 0, 0.5);
display: flex;
justify-content: center;
align-items: center;
z-index: 1000;
}
.cap-approval-content {
background-color: light-dark(var(--white), var(--tasteful-dark));
color: light-dark(var(--off-black), var(--off-white));
padding: 2rem;
border-radius: 8px;
max-width: 80%;
max-height: 80%;
overflow-y: auto;
}
.json-display {
background-color: light-dark(var(--tan), var(--off-black));
color: light-dark(var(--off-black), var(--off-white));
padding: 1rem;
border-radius: 4px;
white-space: pre-wrap;
word-break: break-word;
}
.approval-buttons {
display: flex;
justify-content: flex-end;
gap: 1rem;
margin-top: 1rem;
}
/* Responsive adjustments */
/* 48em ≈ 768px at the default 16px root size: tablet/phone layout. */
@media (max-width: 48em) {
.app-page,
.downloads-page {
padding: 1.5rem;
}
/* Stack the action-button row vertically on narrow screens. */
.app-actions {
flex-direction: column;
}
.download-section {
max-width: 100%;
}
}
/* Continuous full rotation, used for loading indicators. */
@keyframes spin {
0% {
transform: rotate(0deg);
}
100% {
transform: rotate(360deg);
}
}
/* Font Awesome spinner hook: one revolution per second. */
.fa-spin {
animation: spin 1s linear infinite;
}
/* Manifest viewer: a stack of collapsible per-process cards showing
   each process's requested/granted capabilities. */
.manifest-display {
background: light-dark(var(--white), var(--tasteful-dark));
border-radius: var(--border-radius);
padding: 1rem;
max-width: 600px;
}
/* One card per process; overflow:hidden clips children to the
   rounded corners. */
.process-manifest {
margin-bottom: 0.5rem;
border: 1px solid light-dark(var(--gray), var(--off-black));
border-radius: var(--border-radius);
overflow: hidden;
}
/* Header row doubles as the expand/collapse toggle — presumably a
   <button>, which is why background/border are reset here. */
.process-header {
width: 100%;
display: flex;
align-items: center;
gap: 0.5rem;
padding: 0.75rem 1rem;
background: none;
border: none;
cursor: pointer;
color: light-dark(var(--off-black), var(--off-white));
transition: background-color 0.2s;
}
.process-header:hover {
background: light-dark(var(--tan), var(--off-black));
}
/* flex: 1 pushes the indicator icons to the right edge. */
.process-name {
flex: 1;
text-align: left;
font-weight: 500;
}
/* Icon cluster summarizing the process's properties at a glance. */
.process-indicators {
display: flex;
gap: 0.5rem;
color: light-dark(var(--gray), var(--off-white));
}
/* Color-coded indicator icons: orange = networking, blue =
   capabilities, gray = private. */
.network-icon {
color: var(--orange);
}
.capability-icon {
color: var(--blue);
}
.private-icon {
color: var(--gray);
}
/* Expanded panel under the header; tinted to read as "inside". */
.process-details {
padding: 1rem;
background: light-dark(var(--tan), var(--off-black));
border-top: 1px solid light-dark(var(--gray), var(--off-black));
}
/* One titled list per capability group within the details panel. */
.capability-section {
margin-bottom: 1rem;
}
.capability-section:last-child {
margin-bottom: 0;
}
.capability-section h4 {
margin: 0 0 0.5rem 0;
color: light-dark(var(--off-black), var(--off-white));
}
.capability-section ul {
margin: 0;
padding-left: 1.5rem;
color: light-dark(var(--gray), var(--off-white));
}
.capability-section li {
margin-bottom: 0.25rem;
}
/* Notification bay: bell button plus a dropdown panel of
   notification items (downloads, errors, etc.). */
/* position: relative anchors the absolutely-positioned
   .notification-details dropdown below. */
.notification-bay {
position: relative;
margin-right: 1rem;
}
/* The bell toggle; button chrome reset. */
.notification-button {
background: none;
border: none;
cursor: pointer;
display: flex;
align-items: center;
gap: 0.5rem;
padding: 0.5rem;
color: light-dark(var(--off-black), var(--off-white));
}
/* Dropdown panel, right-aligned under the button; fixed width,
   scrolls past 400px. z-index 1000 floats it over page content
   (the modal overlay below uses 1100 to sit above this). */
.notification-details {
position: absolute;
top: 100%;
right: 0;
width: 320px;
max-height: 400px;
overflow-y: auto;
background-color: light-dark(var(--white), var(--tasteful-dark));
border-radius: var(--border-radius);
box-shadow: 0 2px 10px rgba(0, 0, 0, 0.1);
z-index: 1000;
padding: 0.5rem;
}
/* Unread-count pill on the bell; min-width keeps single digits
   circular. */
.badge {
background-color: var(--orange);
color: var(--white);
border-radius: 50%;
padding: 0.25rem 0.5rem;
font-size: 0.75rem;
min-width: 1.5rem;
text-align: center;
}
/* One row per notification; neutral colors by default. */
.notification-item {
display: flex;
align-items: center;
padding: 1rem;
margin: 0.5rem 0;
border-radius: var(--border-radius);
background-color: light-dark(var(--tan), var(--off-black));
color: light-dark(var(--off-black), var(--off-white));
}
/* Severity tints, each with a light/dark pair:
   red = error, green = success, amber = warning, blue = download. */
.notification-item.error {
background-color: light-dark(#ffe6e6, #4a2020);
}
.notification-item.success {
background-color: light-dark(#e6ffe6, #204a20);
}
.notification-item.warning {
background-color: light-dark(#fff3e6, #4a3820);
}
.notification-item.download {
background-color: light-dark(#e6f3ff, #20304a);
}
/* Message text fills the row; actions stay pinned right. */
.notification-content {
flex: 1;
margin-right: 1rem;
}
.notification-actions {
display: flex;
gap: 0.5rem;
margin-left: auto;
}
/* Per-item dismiss (×); chrome reset, orange on hover. */
.dismiss-button {
background: none;
border: none;
cursor: pointer;
color: light-dark(var(--gray), var(--off-white));
padding: 0.25rem;
}
.dismiss-button:hover {
color: var(--orange);
}
/* Thin track for download progress; the inner .progress bar's
   width is set inline by the app and animates via transition. */
.progress-bar {
margin-top: 0.5rem;
height: 4px;
background-color: light-dark(var(--white), var(--off-black));
border-radius: 2px;
overflow: hidden;
}
.progress {
height: 100%;
background-color: var(--orange);
transition: width 0.3s ease;
}
/* Modal styles */
/* Full-viewport dimmed backdrop; z-index 1100 deliberately sits
   above the notification dropdown (1000). */
.modal-overlay {
position: fixed;
top: 0;
left: 0;
right: 0;
bottom: 0;
background-color: rgba(0, 0, 0, 0.5);
display: flex;
align-items: center;
justify-content: center;
z-index: 1100;
}
/* Dialog body; position: relative anchors the absolutely-positioned
   close button. Caps at 80% width / 80vh, scrolling internally. */
.modal-content {
background-color: light-dark(var(--white), var(--tasteful-dark));
color: light-dark(var(--off-black), var(--off-white));
padding: 1.5rem;
border-radius: var(--border-radius);
position: relative;
max-width: 80%;
max-height: 80vh;
overflow-y: auto;
}
/* Close (×) pinned to the dialog's top-right corner. */
.modal-close {
position: absolute;
top: 0.75rem;
right: 0.75rem;
background: none;
border: none;
cursor: pointer;
color: light-dark(var(--gray), var(--off-white));
padding: 0.25rem;
}
.modal-close:hover {
color: var(--orange);
}
/* Attention-grabbing one-shot shake when unread errors exist;
   easing curve gives a snappy, elastic feel. */
.notification-button.has-errors {
animation: shake 0.82s cubic-bezier(.36, .07, .19, .97) both;
}
/* NOTE(review): --error-red is not defined in this section —
   confirm it exists in the root variable declarations. */
.badge.error-badge {
background-color: var(--error-red);
animation: pulse 2s infinite;
}
/* Horizontal jitter: ramps up to ±4px mid-animation, then decays —
   translate3d (rather than translateX) nudges GPU compositing. */
@keyframes shake {
10%,
90% {
transform: translate3d(-1px, 0, 0);
}
20%,
80% {
transform: translate3d(2px, 0, 0);
}
30%,
50%,
70% {
transform: translate3d(-4px, 0, 0);
}
40%,
60% {
transform: translate3d(4px, 0, 0);
}
}
/* Gentle opacity throb (1 → 0.6 → 1) for the error badge. */
@keyframes pulse {
0% {
opacity: 1;
}
50% {
opacity: 0.6;
}
100% {
opacity: 1;
}
}

View File

@ -1,21 +0,0 @@
[package]
name = "uninstall"
version = "0.1.0"
edition = "2021"
[features]
simulation-mode = []
[dependencies]
anyhow = "1.0"
kinode_process_lib = "0.9.4"
process_macros = { git = "https://github.com/kinode-dao/process_macros", rev = "626e501" }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
wit-bindgen = "0.24.0"
[lib]
crate-type = ["cdylib"]
[package.metadata.component]
package = "kinode:process"

File diff suppressed because it is too large Load Diff

View File

@ -1,17 +1,23 @@
interface chess {
/// Our "chess protocol" request/response format. We'll always serialize these
/// to a byte vector and send them over IPC.
variant request {
/// lazy-load-blob: none.
new-game(new-game-request),
/// lazy-load-blob: none.
move(move-request),
/// lazy-load-blob: none.
resign(string),
}
variant response {
/// lazy-load-blob: none.
new-game-accepted,
/// lazy-load-blob: none.
new-game-rejected,
/// lazy-load-blob: none.
move-accepted,
/// lazy-load-blob: none.
move-rejected,
}
@ -28,5 +34,5 @@ interface chess {
world chess-sys-v0 {
import chess;
include process-v0;
include process-v1;
}

View File

@ -1,830 +0,0 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "anyhow"
version = "1.0.79"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "080e9890a082662b09c1ad45f567faeeb47f22b5fb23895fbe1e651e718e25ca"
[[package]]
name = "autocfg"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0dde43e75fd43e8a1bf86103336bc699aa8d17ad1be60c76c0bdfd4828e19b78"
dependencies = [
"autocfg 1.1.0",
]
[[package]]
name = "autocfg"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "base64"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
[[package]]
name = "bincode"
version = "1.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad"
dependencies = [
"serde",
]
[[package]]
name = "bitflags"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
version = "2.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf"
[[package]]
name = "bytes"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223"
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "chess"
version = "0.2.0"
dependencies = [
"anyhow",
"base64",
"bincode",
"kinode_process_lib",
"pleco",
"serde",
"serde_json",
"url",
"wit-bindgen",
]
[[package]]
name = "cloudabi"
version = "0.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"
dependencies = [
"bitflags 1.3.2",
]
[[package]]
name = "crossbeam-deque"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d"
dependencies = [
"crossbeam-epoch",
"crossbeam-utils",
]
[[package]]
name = "crossbeam-epoch"
version = "0.9.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e"
dependencies = [
"crossbeam-utils",
]
[[package]]
name = "crossbeam-utils"
version = "0.8.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345"
[[package]]
name = "either"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07"
[[package]]
name = "equivalent"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
[[package]]
name = "fnv"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
[[package]]
name = "form_urlencoded"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456"
dependencies = [
"percent-encoding",
]
[[package]]
name = "fuchsia-cprng"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba"
[[package]]
name = "getrandom"
version = "0.2.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5"
dependencies = [
"cfg-if",
"libc",
"wasi",
]
[[package]]
name = "hashbrown"
version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604"
[[package]]
name = "heck"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
dependencies = [
"unicode-segmentation",
]
[[package]]
name = "hermit-abi"
version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d3d0e0f38255e7fa3cf31335b3a56f05febd18025f4db5ef7a0cfb4f8da651f"
[[package]]
name = "http"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b32afd38673a8016f7c9ae69e5af41a58f81b1d31689040f2f1959594ce194ea"
dependencies = [
"bytes",
"fnv",
"itoa",
]
[[package]]
name = "id-arena"
version = "2.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "25a2bc672d1148e28034f176e01fffebb08b35768468cc954630da77a1449005"
[[package]]
name = "idna"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6"
dependencies = [
"unicode-bidi",
"unicode-normalization",
]
[[package]]
name = "indexmap"
version = "2.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "433de089bd45971eecf4668ee0ee8f4cec17db4f8bd8f7bc3197a6ce37aa7d9b"
dependencies = [
"equivalent",
"hashbrown",
"serde",
]
[[package]]
name = "itoa"
version = "1.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c"
[[package]]
name = "kinode_process_lib"
version = "0.5.7"
source = "git+https://github.com/kinode-dao/process_lib?tag=v0.5.9-alpha#c1ac7227951fbd8cabf6568704f0ce11e8558c8a"
dependencies = [
"anyhow",
"bincode",
"http",
"mime_guess",
"rand 0.8.5",
"serde",
"serde_json",
"thiserror",
"url",
"wit-bindgen",
]
[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "leb128"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67"
[[package]]
name = "libc"
version = "0.2.152"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13e3bf6590cbc649f4d1a3eefc9d5d6eb746f5200ffb04e5e142700b8faa56e7"
[[package]]
name = "log"
version = "0.4.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f"
[[package]]
name = "mime"
version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
[[package]]
name = "mime_guess"
version = "2.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef"
dependencies = [
"mime",
"unicase",
]
[[package]]
name = "mucow"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c55d0c9dc43dedfd2414deb74ade67687749ef88b1d3482024d4c81d901a7a83"
[[package]]
name = "num_cpus"
version = "1.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43"
dependencies = [
"hermit-abi",
"libc",
]
[[package]]
name = "percent-encoding"
version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
[[package]]
name = "pleco"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28a8c8ab569c544644c468a63f4fe4b33c0706b1472bebb517fabb75ec0f688e"
dependencies = [
"bitflags 1.3.2",
"lazy_static",
"mucow",
"num_cpus",
"rand 0.6.5",
"rayon",
]
[[package]]
name = "ppv-lite86"
version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
[[package]]
name = "proc-macro2"
version = "1.0.78"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef"
dependencies = [
"proc-macro2",
]
[[package]]
name = "rand"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca"
dependencies = [
"autocfg 0.1.8",
"libc",
"rand_chacha 0.1.1",
"rand_core 0.4.2",
"rand_hc",
"rand_isaac",
"rand_jitter",
"rand_os",
"rand_pcg",
"rand_xorshift",
"winapi",
]
[[package]]
name = "rand"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
dependencies = [
"libc",
"rand_chacha 0.3.1",
"rand_core 0.6.4",
]
[[package]]
name = "rand_chacha"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef"
dependencies = [
"autocfg 0.1.8",
"rand_core 0.3.1",
]
[[package]]
name = "rand_chacha"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
dependencies = [
"ppv-lite86",
"rand_core 0.6.4",
]
[[package]]
name = "rand_core"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b"
dependencies = [
"rand_core 0.4.2",
]
[[package]]
name = "rand_core"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc"
[[package]]
name = "rand_core"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
dependencies = [
"getrandom",
]
[[package]]
name = "rand_hc"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4"
dependencies = [
"rand_core 0.3.1",
]
[[package]]
name = "rand_isaac"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08"
dependencies = [
"rand_core 0.3.1",
]
[[package]]
name = "rand_jitter"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b"
dependencies = [
"libc",
"rand_core 0.4.2",
"winapi",
]
[[package]]
name = "rand_os"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071"
dependencies = [
"cloudabi",
"fuchsia-cprng",
"libc",
"rand_core 0.4.2",
"rdrand",
"winapi",
]
[[package]]
name = "rand_pcg"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44"
dependencies = [
"autocfg 0.1.8",
"rand_core 0.4.2",
]
[[package]]
name = "rand_xorshift"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c"
dependencies = [
"rand_core 0.3.1",
]
[[package]]
name = "rayon"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fa7237101a77a10773db45d62004a272517633fbcc3df19d96455ede1122e051"
dependencies = [
"either",
"rayon-core",
]
[[package]]
name = "rayon-core"
version = "1.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2"
dependencies = [
"crossbeam-deque",
"crossbeam-utils",
]
[[package]]
name = "rdrand"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2"
dependencies = [
"rand_core 0.3.1",
]
[[package]]
name = "ryu"
version = "1.0.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c"
[[package]]
name = "semver"
version = "1.0.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b97ed7a9823b74f99c7742f5336af7be5ecd3eeafcb1507d1fa93347b1d589b0"
[[package]]
name = "serde"
version = "1.0.196"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "870026e60fa08c69f064aa766c10f10b1d62db9ccd4d0abb206472bee0ce3b32"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.196"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33c85360c95e7d137454dc81d9a4ed2b8efd8fbe19cee57357b32b9771fccb67"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "serde_json"
version = "1.0.113"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69801b70b1c3dac963ecb03a364ba0ceda9cf60c71cfe475e99864759c8b8a79"
dependencies = [
"itoa",
"ryu",
"serde",
]
[[package]]
name = "smallvec"
version = "1.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6ecd384b10a64542d77071bd64bd7b231f4ed5940fba55e98c3de13824cf3d7"
[[package]]
name = "spdx"
version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62bde1398b09b9f93fc2fc9b9da86e362693e999d3a54a8ac47a99a5a73f638b"
dependencies = [
"smallvec",
]
[[package]]
name = "syn"
version = "2.0.48"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "thiserror"
version = "1.0.56"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d54378c645627613241d077a3a79db965db602882668f9136ac42af9ecb730ad"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.56"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fa0faa943b50f3db30a20aa7e265dbc66076993efed8463e8de414e5d06d3471"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "tinyvec"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50"
dependencies = [
"tinyvec_macros",
]
[[package]]
name = "tinyvec_macros"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]]
name = "unicase"
version = "2.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89"
dependencies = [
"version_check",
]
[[package]]
name = "unicode-bidi"
version = "0.3.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75"
[[package]]
name = "unicode-ident"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
[[package]]
name = "unicode-normalization"
version = "0.1.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921"
dependencies = [
"tinyvec",
]
[[package]]
name = "unicode-segmentation"
version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36"
[[package]]
name = "unicode-xid"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
[[package]]
name = "url"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633"
dependencies = [
"form_urlencoded",
"idna",
"percent-encoding",
]
[[package]]
name = "version_check"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
name = "wasi"
version = "0.11.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "wasm-encoder"
version = "0.38.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ad2b51884de9c7f4fe2fd1043fccb8dcad4b1e29558146ee57a144d15779f3f"
dependencies = [
"leb128",
]
[[package]]
name = "wasm-encoder"
version = "0.41.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e09bca7d6388637d27fb5edbeab11f56bfabcef8743c55ae34370e1e5030a071"
dependencies = [
"leb128",
]
[[package]]
name = "wasm-metadata"
version = "0.10.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c853d3809fc9fccf3bc0ad63f4f51d8eefad0bacf88f957aa991c1d9b88b016e"
dependencies = [
"anyhow",
"indexmap",
"serde",
"serde_derive",
"serde_json",
"spdx",
"wasm-encoder 0.41.0",
"wasmparser 0.121.0",
]
[[package]]
name = "wasmparser"
version = "0.118.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95ee9723b928e735d53000dec9eae7b07a60e490c85ab54abb66659fc61bfcd9"
dependencies = [
"indexmap",
"semver",
]
[[package]]
name = "wasmparser"
version = "0.121.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "953cf6a7606ab31382cb1caa5ae403e77ba70c7f8e12eeda167e7040d42bfda8"
dependencies = [
"bitflags 2.4.2",
"indexmap",
"semver",
]
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "wit-bindgen"
version = "0.16.0"
source = "git+https://github.com/bytecodealliance/wit-bindgen?rev=efcc759#efcc7592cf3277bcb9be1034e48569c6d822b322"
dependencies = [
"bitflags 2.4.2",
"wit-bindgen-rust-macro",
]
[[package]]
name = "wit-bindgen-core"
version = "0.16.0"
source = "git+https://github.com/bytecodealliance/wit-bindgen?rev=efcc759#efcc7592cf3277bcb9be1034e48569c6d822b322"
dependencies = [
"anyhow",
"wit-component",
"wit-parser",
]
[[package]]
name = "wit-bindgen-rust"
version = "0.16.0"
source = "git+https://github.com/bytecodealliance/wit-bindgen?rev=efcc759#efcc7592cf3277bcb9be1034e48569c6d822b322"
dependencies = [
"anyhow",
"heck",
"wasm-metadata",
"wit-bindgen-core",
"wit-component",
]
[[package]]
name = "wit-bindgen-rust-macro"
version = "0.16.0"
source = "git+https://github.com/bytecodealliance/wit-bindgen?rev=efcc759#efcc7592cf3277bcb9be1034e48569c6d822b322"
dependencies = [
"anyhow",
"proc-macro2",
"quote",
"syn",
"wit-bindgen-core",
"wit-bindgen-rust",
"wit-component",
]
[[package]]
name = "wit-component"
version = "0.18.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b8a35a2a9992898c9d27f1664001860595a4bc99d32dd3599d547412e17d7e2"
dependencies = [
"anyhow",
"bitflags 2.4.2",
"indexmap",
"log",
"serde",
"serde_derive",
"serde_json",
"wasm-encoder 0.38.1",
"wasm-metadata",
"wasmparser 0.118.1",
"wit-parser",
]
[[package]]
name = "wit-parser"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df4913a2219096373fd6512adead1fb77ecdaa59d7fc517972a7d30b12f625be"
dependencies = [
"anyhow",
"id-arena",
"indexmap",
"log",
"semver",
"serde",
"serde_derive",
"serde_json",
"unicode-xid",
]

View File

@ -9,11 +9,11 @@ simulation-mode = []
[dependencies]
anyhow = "1.0"
bincode = "1.3.3"
kinode_process_lib = "0.9.4"
kinode_process_lib = "0.10.0"
pleco = "0.5"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
wit-bindgen = "0.24.0"
wit-bindgen = "0.36.0"
[lib]
crate-type = ["cdylib"]

View File

@ -95,7 +95,7 @@ fn initialize(our: Address) {
// add ourselves to the homepage
kinode_process_lib::homepage::add_to_homepage("Chess", Some(ICON), Some("/"), None);
// create an HTTP server struct with which to manipulate `http_server:distro:sys`
// create an HTTP server struct with which to manipulate `http-server:distro:sys`
let mut http_server = server::HttpServer::new(5);
let http_config = server::HttpBindingConfig::default();
@ -179,7 +179,7 @@ fn handle_request(
// if the message is from the HTTP server runtime module, we should handle it
// as an HTTP request and not a chess request
if message.source().process == "http_server:distro:sys" {
if message.source().process == "http-server:distro:sys" {
return handle_http_request(state, http_server, message);
}

View File

@ -1,16 +1,16 @@
{
"name": "Chess by Kinode",
"description": "A peer-to-peer chess game",
"description": "A peer-to-peer chess game.",
"image": "",
"properties": {
"package_name": "chess",
"current_version": "0.2.1",
"current_version": "1.0.0",
"publisher": "sys",
"mirrors": [],
"code_hashes": {
"0.2.1": ""
"1.0.0": ""
},
"wit_version": 0,
"wit_version": 1,
"dependencies": []
},
"external_url": "https://kinode.org",

View File

@ -6,11 +6,11 @@
"request_networking": true,
"request_capabilities": [
"homepage:homepage:sys",
"http_server:distro:sys",
"http-server:distro:sys",
"net:distro:sys",
"vfs:distro:sys"
],
"grant_capabilities": [],
"public": true
}
]
]

File diff suppressed because it is too large Load Diff

View File

@ -2,7 +2,7 @@
resolver = "2"
members = [
"contacts",
"get_names",
"get-names",
]
[profile.release]

View File

@ -7,30 +7,53 @@ interface contacts {
}
variant request {
get-names, // requires read-names-only
get-all-contacts, // requires read
get-contact(string), // requires read
add-contact(string), // requires add
// tuple<node, field, value>
add-field(tuple<string, string, string>), // requires add
remove-contact(string), // requires remove
// tuple<node, field>
remove-field(tuple<string, string>), // requires remove
/// requires ReadNameOnly capability
/// lazy-load-blob: none.
get-names,
/// requires Read capability
/// lazy-load-blob: none.
get-all-contacts,
/// requires Read capability
/// lazy-load-blob: none.
get-contact(string),
/// requires Add capability
/// lazy-load-blob: none.
add-contact(string),
/// requires Add capability
/// lazy-load-blob: none.
/// tuple<node, field, value>
add-field(tuple<string, string, string>),
/// requires Remove capability
/// lazy-load-blob: none.
remove-contact(string),
/// requires Remove capability
/// lazy-load-blob: none.
/// tuple<node, field>
remove-field(tuple<string, string>),
}
variant response {
/// lazy-load-blob: none.
get-names(list<string>),
get-all-contacts, // JSON all-contacts dict in blob
get-contact, // JSON contact dict in blob
/// lazy-load-blob: required; JSON all-contacts dict in blob.
get-all-contacts,
/// lazy-load-blob: required; JSON contact dict in blob.
get-contact,
/// lazy-load-blob: none.
add-contact,
/// lazy-load-blob: none.
add-field,
/// lazy-load-blob: none.
remove-contact,
/// lazy-load-blob: none.
remove-field,
err(string), // any failed request will receive this response
/// any failed request will receive this response
/// lazy-load-blob: none.
err(string),
}
}
world contacts-sys-v0 {
import contacts;
include process-v0;
include process-v1;
}

View File

@ -7,11 +7,11 @@ edition = "2021"
simulation-mode = []
[dependencies]
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "088a549" }
process_macros = { git = "https://github.com/kinode-dao/process_macros", rev = "626e501" }
kinode_process_lib = "0.10.0"
process_macros = "0.1"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
wit-bindgen = "0.24.0"
wit-bindgen = "0.36.0"
[lib]
crate-type = ["cdylib"]

View File

@ -1,7 +1,7 @@
use crate::kinode::process::contacts;
use kinode_process_lib::{
await_message, call_init, eth, get_blob, get_typed_state, homepage, http, kimap, kiprintln,
set_state, Address, Capability, LazyLoadBlob, Message, NodeId, Response,
await_message, call_init, eth, get_blob, get_typed_state, homepage, http, kimap, set_state,
Address, Capability, LazyLoadBlob, Message, NodeId, Response,
};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
@ -35,17 +35,27 @@ struct Contact(HashMap<String, serde_json::Value>);
struct Contacts(HashMap<NodeId, Contact>);
#[derive(Debug, Serialize, Deserialize)]
struct ContactsState {
struct ContactsStateV1 {
our: Address,
contacts: Contacts,
}
impl ContactsState {
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "version")]
enum VersionedState {
/// State fully stored in memory, persisted using serde_json.
/// Future state version will use SQLite.
V1(ContactsStateV1),
}
impl VersionedState {
fn new(our: Address) -> Self {
get_typed_state(|bytes| serde_json::from_slice(bytes)).unwrap_or(Self {
our,
contacts: Contacts(HashMap::new()),
})
get_typed_state(|bytes| serde_json::from_slice(bytes)).unwrap_or(Self::V1(
ContactsStateV1 {
our,
contacts: Contacts(HashMap::new()),
},
))
}
fn save(&self) {
@ -53,36 +63,57 @@ impl ContactsState {
}
fn contacts(&self) -> &Contacts {
&self.contacts
match self {
VersionedState::V1(state) => &state.contacts,
}
}
fn get_contact(&self, node: NodeId) -> Option<&Contact> {
self.contacts.0.get(&node)
match self {
VersionedState::V1(state) => state.contacts.0.get(&node),
}
}
fn add_contact(&mut self, node: NodeId) {
self.contacts.0.insert(node, Contact(HashMap::new()));
match self {
VersionedState::V1(state) => {
state.contacts.0.insert(node, Contact(HashMap::new()));
}
}
self.save();
}
fn remove_contact(&mut self, node: NodeId) {
self.contacts.0.remove(&node);
match self {
VersionedState::V1(state) => {
state.contacts.0.remove(&node);
}
}
self.save();
}
fn add_field(&mut self, node: NodeId, field: String, value: serde_json::Value) {
self.contacts
.0
.entry(node)
.or_insert_with(|| Contact(HashMap::new()))
.0
.insert(field, value);
match self {
VersionedState::V1(state) => {
state
.contacts
.0
.entry(node)
.or_insert_with(|| Contact(HashMap::new()))
.0
.insert(field, value);
}
}
self.save();
}
fn remove_field(&mut self, node: NodeId, field: String) {
if let Some(contact) = self.contacts.0.get_mut(&node) {
contact.0.remove(&field);
match self {
VersionedState::V1(state) => {
if let Some(contact) = state.contacts.0.get_mut(&node) {
contact.0.remove(&field);
}
}
}
self.save();
}
@ -97,15 +128,20 @@ impl ContactsState {
),
);
}
fn our(&self) -> &Address {
match self {
VersionedState::V1(state) => &state.our,
}
}
}
call_init!(initialize);
fn initialize(our: Address) {
kiprintln!("started");
homepage::add_to_homepage("Contacts", Some(ICON), Some("/"), None);
let mut state: ContactsState = ContactsState::new(our);
let mut state: VersionedState = get_typed_state(|bytes| serde_json::from_slice(bytes))
.unwrap_or_else(|| VersionedState::new(our));
let kimap = kimap::Kimap::new(
eth::Provider::new(CHAIN_ID, CHAIN_TIMEOUT),
@ -117,7 +153,7 @@ fn initialize(our: Address) {
// serve the frontend on a secure subdomain
http_server
.serve_ui(
&state.our,
state.our(),
"ui",
vec!["/"],
http::server::HttpBindingConfig::default().secure_subdomain(true),
@ -130,7 +166,7 @@ fn initialize(our: Address) {
}
fn main_loop(
state: &mut ContactsState,
state: &mut VersionedState,
kimap: &kimap::Kimap,
http_server: &mut http::server::HttpServer,
) {
@ -148,7 +184,7 @@ fn main_loop(
}) => {
// ignore messages from other nodes -- technically superfluous check
// since manifest does not acquire networking capability
if source.node() != state.our.node {
if source.node() != state.our().node {
continue;
}
handle_request(&source, &body, capabilities, state, kimap, http_server);
@ -162,12 +198,12 @@ fn handle_request(
source: &Address,
body: &[u8],
capabilities: Vec<Capability>,
state: &mut ContactsState,
state: &mut VersionedState,
kimap: &kimap::Kimap,
http_server: &mut http::server::HttpServer,
) {
// source node is ALWAYS ourselves since networking is disabled
if source.process == "http_server:distro:sys" {
if source.process == "http-server:distro:sys" {
// receive HTTP requests and websocket connection messages from our server
let server_request = http_server.parse_request(body).unwrap();
@ -192,7 +228,7 @@ fn handle_request(
/// Handle HTTP requests from our own frontend.
fn handle_http_request(
state: &mut ContactsState,
state: &mut VersionedState,
kimap: &kimap::Kimap,
http_request: &http::server::IncomingHttpRequest,
) -> (http::server::HttpResponse, Option<LazyLoadBlob>) {
@ -239,7 +275,7 @@ fn handle_http_request(
}
fn handle_contacts_request(
state: &mut ContactsState,
state: &mut VersionedState,
kimap: &kimap::Kimap,
request_bytes: &[u8],
capabilities: Option<Vec<Capability>>,
@ -254,7 +290,7 @@ fn handle_contacts_request(
// each request requires one of read-name-only, read, add, or remove
if let Some(capabilities) = capabilities {
let required_capability = Capability::new(
&state.our,
state.our(),
serde_json::to_string(&match request {
contacts::Request::GetNames => contacts::Capability::ReadNameOnly,
contacts::Request::GetAllContacts | contacts::Request::GetContact(_) => {

View File

@ -6,11 +6,11 @@ publish = false
[dependencies]
anyhow = "1.0"
kinode_process_lib = "0.9.2"
process_macros = { git = "https://github.com/kinode-dao/process_macros", rev = "626e501" }
kinode_process_lib = "0.10.0"
process_macros = "0.1"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
wit-bindgen = "0.24.0"
wit-bindgen = "0.36.0"
[lib]
crate-type = ["cdylib"]

View File

@ -4,13 +4,13 @@
"image": "",
"properties": {
"package_name": "contacts",
"current_version": "0.1.0",
"current_version": "1.0.0",
"publisher": "sys",
"mirrors": [],
"code_hashes": {
"0.1.0": ""
"1.0.0": ""
},
"wit_version": 0,
"wit_version": 1,
"dependencies": []
},
"external_url": "https://kinode.org",

View File

@ -7,14 +7,31 @@
"request_capabilities": [
"eth:distro:sys",
"homepage:homepage:sys",
"http_server:distro:sys",
"http-server:distro:sys",
"vfs:distro:sys"
],
"grant_capabilities": [
"eth:distro:sys",
"http_server:distro:sys",
"http-server:distro:sys",
"terminal:terminal:sys",
{
"process": "terminal:terminal:sys",
"params": "ReadNameOnly"
},
{
"process": "terminal:terminal:sys",
"params": "Read"
},
{
"process": "terminal:terminal:sys",
"params": "Add"
},
{
"process": "terminal:terminal:sys",
"params": "Remove"
},
"vfs:distro:sys"
],
"public": false
}
]
]

View File

@ -13,6 +13,6 @@
"grant_capabilities": [
"contacts:contacts:sys"
],
"wit_version": 0
"wit_version": 1
}
}
}

File diff suppressed because it is too large Load Diff

View File

@ -1,26 +0,0 @@
interface homepage {
/// The request format to add or remove an app from the homepage. You must have messaging
/// access to `homepage:homepage:sys` in order to perform this. Serialize using serde_json.
variant request {
/// the package and process name will come from request source.
/// the path will automatically have the process_id prepended.
/// the icon is a base64 encoded image.
add(add-request),
remove,
/// ONLY settings:settings:sys may call this request
/// (this is checked in-code)
set-stylesheet(string),
}
record add-request {
label: string,
icon: option<string>,
path: option<string>,
widget: option<string>,
}
}
world homepage-sys-v0 {
import homepage;
include process-v0;
}

View File

@ -0,0 +1,45 @@
interface homepage {
enum capability {
remove-other,
set-stylesheet,
}
/// The request format to add or remove an app from the homepage. You must have messaging
/// access to `homepage:homepage:sys` in order to perform this. Serialize using serde_json.
variant request {
/// the package and process name will come from request source.
/// the path will automatically have the process_id prepended.
/// the icon is a base64 encoded image.
///
/// lazy-load-blob: none.
add(add-request),
/// remove ourself from homepage (message source will be item removed)
///
/// lazy-load-blob: none.
remove,
/// remove another app from homepage
/// using this requires RemoveOther capability
/// app store uses this to remove apps on uninstall
///
/// lazy-load-blob: none.
remove-other(string),
/// set the stylesheet for the homepage
/// using this requires SetStylesheet capability
/// settings:settings:sys uses this to set the stylesheet
///
/// lazy-load-blob: none.
set-stylesheet(string),
}
record add-request {
label: string,
icon: option<string>,
path: option<string>,
widget: option<string>,
}
}
world homepage-sys-v1 {
import homepage;
include process-v1;
}

View File

@ -1,555 +0,0 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "anyhow"
version = "1.0.79"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "080e9890a082662b09c1ad45f567faeeb47f22b5fb23895fbe1e651e718e25ca"
[[package]]
name = "bincode"
version = "1.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad"
dependencies = [
"serde",
]
[[package]]
name = "bitflags"
version = "2.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf"
[[package]]
name = "bytes"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223"
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "equivalent"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
[[package]]
name = "fnv"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
[[package]]
name = "form_urlencoded"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456"
dependencies = [
"percent-encoding",
]
[[package]]
name = "getrandom"
version = "0.2.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5"
dependencies = [
"cfg-if",
"libc",
"wasi",
]
[[package]]
name = "hashbrown"
version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604"
[[package]]
name = "heck"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
dependencies = [
"unicode-segmentation",
]
[[package]]
name = "homepage"
version = "0.1.0"
dependencies = [
"anyhow",
"bincode",
"kinode_process_lib",
"serde",
"serde_json",
"wit-bindgen",
]
[[package]]
name = "http"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b32afd38673a8016f7c9ae69e5af41a58f81b1d31689040f2f1959594ce194ea"
dependencies = [
"bytes",
"fnv",
"itoa",
]
[[package]]
name = "id-arena"
version = "2.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "25a2bc672d1148e28034f176e01fffebb08b35768468cc954630da77a1449005"
[[package]]
name = "idna"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6"
dependencies = [
"unicode-bidi",
"unicode-normalization",
]
[[package]]
name = "indexmap"
version = "2.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "433de089bd45971eecf4668ee0ee8f4cec17db4f8bd8f7bc3197a6ce37aa7d9b"
dependencies = [
"equivalent",
"hashbrown",
"serde",
]
[[package]]
name = "itoa"
version = "1.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c"
[[package]]
name = "kinode_process_lib"
version = "0.5.6"
source = "git+https://github.com/kinode-dao/process_lib?rev=fccb6a0#fccb6a0c07ebda3e385bff7f76e4984b741f01c7"
dependencies = [
"anyhow",
"bincode",
"http",
"mime_guess",
"rand",
"serde",
"serde_json",
"thiserror",
"url",
"wit-bindgen",
]
[[package]]
name = "leb128"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67"
[[package]]
name = "libc"
version = "0.2.152"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13e3bf6590cbc649f4d1a3eefc9d5d6eb746f5200ffb04e5e142700b8faa56e7"
[[package]]
name = "log"
version = "0.4.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f"
[[package]]
name = "mime"
version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
[[package]]
name = "mime_guess"
version = "2.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef"
dependencies = [
"mime",
"unicase",
]
[[package]]
name = "percent-encoding"
version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
[[package]]
name = "ppv-lite86"
version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
[[package]]
name = "proc-macro2"
version = "1.0.78"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef"
dependencies = [
"proc-macro2",
]
[[package]]
name = "rand"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
dependencies = [
"libc",
"rand_chacha",
"rand_core",
]
[[package]]
name = "rand_chacha"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
dependencies = [
"ppv-lite86",
"rand_core",
]
[[package]]
name = "rand_core"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
dependencies = [
"getrandom",
]
[[package]]
name = "ryu"
version = "1.0.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c"
[[package]]
name = "semver"
version = "1.0.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b97ed7a9823b74f99c7742f5336af7be5ecd3eeafcb1507d1fa93347b1d589b0"
[[package]]
name = "serde"
version = "1.0.196"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "870026e60fa08c69f064aa766c10f10b1d62db9ccd4d0abb206472bee0ce3b32"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.196"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33c85360c95e7d137454dc81d9a4ed2b8efd8fbe19cee57357b32b9771fccb67"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "serde_json"
version = "1.0.113"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69801b70b1c3dac963ecb03a364ba0ceda9cf60c71cfe475e99864759c8b8a79"
dependencies = [
"itoa",
"ryu",
"serde",
]
[[package]]
name = "smallvec"
version = "1.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6ecd384b10a64542d77071bd64bd7b231f4ed5940fba55e98c3de13824cf3d7"
[[package]]
name = "spdx"
version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62bde1398b09b9f93fc2fc9b9da86e362693e999d3a54a8ac47a99a5a73f638b"
dependencies = [
"smallvec",
]
[[package]]
name = "syn"
version = "2.0.48"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "thiserror"
version = "1.0.56"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d54378c645627613241d077a3a79db965db602882668f9136ac42af9ecb730ad"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.56"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fa0faa943b50f3db30a20aa7e265dbc66076993efed8463e8de414e5d06d3471"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "tinyvec"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50"
dependencies = [
"tinyvec_macros",
]
[[package]]
name = "tinyvec_macros"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]]
name = "unicase"
version = "2.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89"
dependencies = [
"version_check",
]
[[package]]
name = "unicode-bidi"
version = "0.3.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75"
[[package]]
name = "unicode-ident"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
[[package]]
name = "unicode-normalization"
version = "0.1.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921"
dependencies = [
"tinyvec",
]
[[package]]
name = "unicode-segmentation"
version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36"
[[package]]
name = "unicode-xid"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
[[package]]
name = "url"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633"
dependencies = [
"form_urlencoded",
"idna",
"percent-encoding",
]
[[package]]
name = "version_check"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
name = "wasi"
version = "0.11.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "wasm-encoder"
version = "0.38.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ad2b51884de9c7f4fe2fd1043fccb8dcad4b1e29558146ee57a144d15779f3f"
dependencies = [
"leb128",
]
[[package]]
name = "wasm-encoder"
version = "0.41.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e09bca7d6388637d27fb5edbeab11f56bfabcef8743c55ae34370e1e5030a071"
dependencies = [
"leb128",
]
[[package]]
name = "wasm-metadata"
version = "0.10.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c853d3809fc9fccf3bc0ad63f4f51d8eefad0bacf88f957aa991c1d9b88b016e"
dependencies = [
"anyhow",
"indexmap",
"serde",
"serde_derive",
"serde_json",
"spdx",
"wasm-encoder 0.41.0",
"wasmparser 0.121.0",
]
[[package]]
name = "wasmparser"
version = "0.118.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95ee9723b928e735d53000dec9eae7b07a60e490c85ab54abb66659fc61bfcd9"
dependencies = [
"indexmap",
"semver",
]
[[package]]
name = "wasmparser"
version = "0.121.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "953cf6a7606ab31382cb1caa5ae403e77ba70c7f8e12eeda167e7040d42bfda8"
dependencies = [
"bitflags",
"indexmap",
"semver",
]
[[package]]
name = "wit-bindgen"
version = "0.16.0"
source = "git+https://github.com/bytecodealliance/wit-bindgen?rev=efcc759#efcc7592cf3277bcb9be1034e48569c6d822b322"
dependencies = [
"bitflags",
"wit-bindgen-rust-macro",
]
[[package]]
name = "wit-bindgen-core"
version = "0.16.0"
source = "git+https://github.com/bytecodealliance/wit-bindgen?rev=efcc759#efcc7592cf3277bcb9be1034e48569c6d822b322"
dependencies = [
"anyhow",
"wit-component",
"wit-parser",
]
[[package]]
name = "wit-bindgen-rust"
version = "0.16.0"
source = "git+https://github.com/bytecodealliance/wit-bindgen?rev=efcc759#efcc7592cf3277bcb9be1034e48569c6d822b322"
dependencies = [
"anyhow",
"heck",
"wasm-metadata",
"wit-bindgen-core",
"wit-component",
]
[[package]]
name = "wit-bindgen-rust-macro"
version = "0.16.0"
source = "git+https://github.com/bytecodealliance/wit-bindgen?rev=efcc759#efcc7592cf3277bcb9be1034e48569c6d822b322"
dependencies = [
"anyhow",
"proc-macro2",
"quote",
"syn",
"wit-bindgen-core",
"wit-bindgen-rust",
"wit-component",
]
[[package]]
name = "wit-component"
version = "0.18.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b8a35a2a9992898c9d27f1664001860595a4bc99d32dd3599d547412e17d7e2"
dependencies = [
"anyhow",
"bitflags",
"indexmap",
"log",
"serde",
"serde_derive",
"serde_json",
"wasm-encoder 0.38.1",
"wasm-metadata",
"wasmparser 0.118.1",
"wit-parser",
]
[[package]]
name = "wit-parser"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df4913a2219096373fd6512adead1fb77ecdaa59d7fc517972a7d30b12f625be"
dependencies = [
"anyhow",
"id-arena",
"indexmap",
"log",
"semver",
"serde",
"serde_derive",
"serde_json",
"unicode-xid",
]

Some files were not shown because too many files have changed in this diff Show More