Mirror of https://github.com/uqbar-dao/nectar.git (synced 2024-12-30 03:52:50 +03:00)

Commit 12fd44cc7c: Merge branch 'v0.10.0' into hf/terminal-m-caps

Cargo.lock (generated): 583 changes. File diff suppressed because it is too large.

Dockerfile: 19 changes
@@ -16,9 +16,26 @@ FROM downloader_${TARGETARCH} AS downloader

FROM debian:12-slim

RUN apt-get update && apt-get install openssl -y
# Create a non-root user and group
RUN groupadd -r kinode && \
    useradd -r -g kinode -d /kinode-home/home/kinode kinode

RUN apt-get update && \
    apt-get install openssl -y && \
    rm -rf /var/lib/apt/lists/*

# Create directory for kinode and set permissions
RUN mkdir -p /kinode-home/home/kinode && \
    chown -R kinode:kinode /kinode-home

COPY --from=downloader /tmp/download/kinode /bin/kinode
RUN chown kinode:kinode /bin/kinode && \
    chmod 755 /bin/kinode

# Switch to non-root user
USER kinode

WORKDIR /kinode-home

ENTRYPOINT [ "/bin/kinode" ]
CMD [ "/kinode-home" ]
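The updated Dockerfile drops root for a dedicated `kinode` user. A quick way to sanity-check that against a locally built image (the image tag here is an assumption carried over from the README build step, not part of this diff):

```bash
# Print the user the container runs as; expect "kinode", not "root".
docker run --rm --entrypoint whoami kinode-0.10.0
```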
README.md: 26 changes

@@ -202,20 +202,32 @@ To build a local Docker image, run the following command in this project root.

```bash
# The `VERSION` may be replaced with the tag of a GitHub release
export VERSION=0.9.8

# Build for your system's architecture
docker build . -t 0xlynett/kinode --build-arg VERSION=v0.9.1
docker build . -t kinode-${VERSION} --build-arg VERSION=v${VERSION} --platform linux/amd64

# Build a multiarch image
docker buildx build . --platform arm64,amd64 --build-arg VERSION=v0.9.1 -t 0xlynett/kinode
docker buildx build . -t kinode-${VERSION} --build-arg VERSION=v${VERSION} --platform arm64,amd64
```

For example:
To run, for example for a node named `helloworld.os`:

```bash
docker volume create kinode-volume
export NODENAME=helloworld.os

docker run -d -p 8080:8080 -it --name my-kinode \
  --mount type=volume,source=kinode-volume,destination=/kinode-home \
  0xlynett/kinode
docker volume create kinode-${NODENAME}

docker run -p 8080:8080 --rm -it --name kinode-${NODENAME} --mount type=volume,source=kinode-${NODENAME},destination=/kinode-home kinode-${VERSION}
```

which will launch your Kinode container attached to the terminal.
Alternatively you can run it detached:
```
docker run -p 8080:8080 --rm -dt --name kinode-${NODENAME} --mount type=volume,source=kinode-${NODENAME},destination=/kinode-home kinode-${VERSION}
```
Note that the `-t` flag *must* be passed.
If it is not passed, you must pass the `--detached` argument to the Kinode binary, i.e.
```
docker run -p 8080:8080 --rm -d --name kinode-${NODENAME} --mount type=volume,source=kinode-${NODENAME},destination=/kinode-home kinode-${VERSION} /kinode-home --detached
```
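For the detached run documented above, standard Docker commands can be used to follow the node's terminal output and shut it down; the container name comes from the `--name` flag, shown here with the README's example node name:

```bash
# Tail the detached Kinode container's output, then stop it (with --rm it is also removed).
docker logs -f kinode-helloworld.os
docker stop kinode-helloworld.os
```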
@@ -1,3 +1,5 @@
@import url('https://fonts.googleapis.com/css2?family=Kode+Mono:wght@700&display=swap');

/* CSS Reset and Base Styles */
*,
*::before,
@@ -23,8 +25,6 @@ select {
  font-family: var(--font-family-main);
}

@import url('https://fonts.googleapis.com/css2?family=Kode+Mono:wght@700&display=swap');

/* Variables */
:root {
  color-scheme: light dark;
@@ -41,6 +41,7 @@ alloy-primitives = "0.7.6"
alloy-sol-macro = "0.7.6"
alloy-sol-types = "0.7.6"
anyhow = "1.0.71"
argon2 = "0.5.3"
async-trait = "0.1.71"
base64 = "0.22.0"
bincode = "1.3.3"
@@ -30,14 +30,36 @@ fn compute_hash(file_path: &Path) -> anyhow::Result<String> {

fn main() -> anyhow::Result<()> {
    let path_to_packages_zip = match std::env::var("PATH_TO_PACKAGES_ZIP") {
        Ok(env_var) => env_var,
        Ok(env_var) => {
            let path = PathBuf::from(&env_var);
            if !path.exists() {
                let path = std::env::current_dir()?;
                let Some(path) = path.parent() else {
                    return Err(anyhow::anyhow!(
                        "Given path to packages {env_var} not found (cwd: {:?})",
                        std::env::current_dir()
                    ));
                };
                let path = path.join(&env_var);
                if path.exists() {
                    path.display().to_string()
                } else {
                    return Err(anyhow::anyhow!(
                        "Given path to packages {env_var} not found in parent of cwd: {:?}",
                        std::env::current_dir()
                    ));
                }
            } else {
                env_var
            }
        }
        Err(_) => {
            let canonical_path = PathBuf::from(CANONICAL_PACKAGES_ZIP_PATH);
            if canonical_path.exists() {
                p!("No path given via PATH_TO_PACKAGES_ZIP envvar. Defaulting to path of `kinode/target/packages.zip`.");
                CANONICAL_PACKAGES_ZIP_PATH.to_string()
            } else {
                return Err(anyhow::anyhow!("You must build packages.zip with scripts/build_packages or set PATH_TO_PACKAGES_ZIP to point to your desired pacakges.zip (default path at kinode/target/packages.zip was not populated)."));
                return Err(anyhow::anyhow!("You must build packages.zip with scripts/build-packages or set PATH_TO_PACKAGES_ZIP to point to your desired pacakges.zip (default path at kinode/target/packages.zip was not populated)."));
            }
        }
    };

@@ -49,6 +71,10 @@ fn main() -> anyhow::Result<()> {
    }

    let path_to_packages_zip_path = PathBuf::from(&path_to_packages_zip).canonicalize()?;
    let canonical_packages_zip_path = PathBuf::from(CANONICAL_PACKAGES_ZIP_PATH);
    if !canonical_packages_zip_path.exists() {
        std::fs::File::create(&canonical_packages_zip_path)?;
    }
    let canonical_packages_zip_path = PathBuf::from(CANONICAL_PACKAGES_ZIP_PATH).canonicalize()?;
    if path_to_packages_zip_path != canonical_packages_zip_path {
        std::fs::copy(&path_to_packages_zip_path, &canonical_packages_zip_path)?;
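The new fallback means a relative `PATH_TO_PACKAGES_ZIP` that is not found from the current working directory is retried against the parent directory before the build fails. A minimal sketch of both invocations, assuming the build script runs with the `kinode` crate directory as its cwd (paths are illustrative, not taken from the diff):

```bash
# Absolute path: used as-is.
PATH_TO_PACKAGES_ZIP=/tmp/packages.zip cargo build

# Repo-root-relative path: not found from kinode/, so the build script retries
# it against the parent of the cwd (the repository root) before giving up.
PATH_TO_PACKAGES_ZIP=kinode/target/packages.zip cargo build
```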
kinode/packages/app-store/Cargo.lock (generated): 4 changes

@@ -1656,8 +1656,8 @@ dependencies = [

[[package]]
name = "kinode_process_lib"
version = "0.9.4"
source = "git+https://github.com/kinode-dao/process_lib?rev=778457a#778457ae52c934fd17e0e846127a9472a8e03170"
version = "0.10.0"
source = "git+https://github.com/kinode-dao/process_lib?rev=1fe8612#1fe8612a24f6806270ac9b02c7bd4f464a1422ed"
dependencies = [
 "alloy",
 "alloy-primitives 0.7.7",
@@ -11,7 +11,7 @@ alloy-primitives = "0.7.6"
alloy-sol-types = "0.7.6"
anyhow = "1.0"
bincode = "1.3.3"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
process_macros = { git = "https://github.com/kinode-dao/process_macros", rev = "626e501" }
rand = "0.8"
serde = { version = "1.0", features = ["derive"] }
@@ -5,7 +5,7 @@
use crate::{
    kinode::process::chain::{ChainRequests, ChainResponses},
    kinode::process::downloads::{
        DownloadRequests, DownloadResponses, LocalDownloadRequest, RemoveFileRequest,
        DownloadRequests, DownloadResponses, Entry, LocalDownloadRequest, RemoveFileRequest,
    },
    state::{MirrorCheck, PackageState, State},
};
@@ -31,6 +31,7 @@ pub fn init_frontend(our: &Address, http_server: &mut server::HttpServer) {
        "/apps/:id", // detail about an on-chain app
        "/downloads/:id", // local downloads for an app
        "/installed/:id", // detail about an installed app
        "/manifest", // manifest of a downloaded app, id & version hash in query params
        // actions
        "/apps/:id/download", // download a listed app
        "/apps/:id/install", // install a downloaded app
@@ -190,6 +191,7 @@ fn make_widget() -> String {
/// - get all apps we've published: GET /ourapps
/// - get detail about a specific app: GET /apps/:id
/// - get detail about a specific apps downloads: GET /downloads/:id
/// - get manifest of a specific downloaded app: GET /manifest?id={id}&version_hash={version_hash}
/// - remove a downloaded app: POST /downloads/:id/remove
/// - get online/offline mirrors for a listed app: GET /mirrorcheck/:node
@@ -225,8 +227,8 @@ pub fn handle_http_request(
    }
}

fn get_package_id(url_params: &HashMap<String, String>) -> anyhow::Result<PackageId> {
    let Some(package_id) = url_params.get("id") else {
fn get_package_id(params: &HashMap<String, String>) -> anyhow::Result<PackageId> {
    let Some(package_id) = params.get("id") else {
        return Err(anyhow::anyhow!("Missing id"));
    };

@@ -246,6 +248,7 @@ fn gen_package_info(id: &PackageId, state: &PackageState) -> serde_json::Value {
        "our_version_hash": state.our_version_hash,
        "verified": state.verified,
        "caps_approved": state.caps_approved,
        "pending_update_hash": state.pending_update_hash,
    })
}

@@ -258,6 +261,7 @@ fn serve_paths(

    let bound_path: &str = req.bound_path(Some(&our.process.to_string()));
    let url_params = req.url_params();
    let query_params = req.query_params();

    match bound_path {
        // GET all apps
@@ -362,6 +366,73 @@ fn serve_paths(
                )),
            }
        }
        "/manifest" => {
            // get manifest of a downloaded app, version hash and id in query params
            let Ok(package_id) = get_package_id(query_params) else {
                return Ok((
                    StatusCode::BAD_REQUEST,
                    None,
                    format!("Missing id in query params.").into_bytes(),
                ));
            };

            let Some(version_hash) = query_params.get("version_hash") else {
                return Ok((
                    StatusCode::BAD_REQUEST,
                    None,
                    format!("Missing version_hash in query params.").into_bytes(),
                ));
            };

            let package_id = crate::kinode::process::main::PackageId::from_process_lib(package_id);

            // get the file corresponding to the version hash, extract manifest and return.
            let resp = Request::to(("our", "downloads", "app_store", "sys"))
                .body(serde_json::to_vec(&DownloadRequests::GetFiles(Some(
                    package_id.clone(),
                )))?)
                .send_and_await_response(5)??;

            let msg = serde_json::from_slice::<DownloadResponses>(resp.body())?;
            match msg {
                DownloadResponses::GetFiles(files) => {
                    let file_name = format!("{version_hash}.zip");
                    let file_entry = files.into_iter().find(|entry| match entry {
                        Entry::File(file) => file.name == file_name,
                        _ => false,
                    });

                    match file_entry {
                        Some(Entry::File(file)) => {
                            let response = serde_json::json!({
                                "package_id": package_id,
                                "version_hash": version_hash,
                                "manifest": file.manifest,
                            });
                            return Ok((StatusCode::OK, None, serde_json::to_vec(&response)?));
                        }
                        _ => {
                            return Ok((
                                StatusCode::NOT_FOUND,
                                None,
                                format!("File with version hash {} not found", version_hash)
                                    .into_bytes(),
                            ));
                        }
                    }
                }
                DownloadResponses::Err(e) => Ok((
                    StatusCode::NOT_FOUND,
                    None,
                    format!("Error from downloads: {:?}", e).into_bytes(),
                )),
                _ => Ok((
                    StatusCode::INTERNAL_SERVER_ERROR,
                    None,
                    format!("Invalid response from downloads: {:?}", msg).into_bytes(),
                )),
            }
        }
        "/installed" => {
            let all: Vec<serde_json::Value> = state
                .packages
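For reference, the new `/manifest` route added above is queried with the package id and version hash as query parameters. A hypothetical request against a local node (host, port, path prefix, the placeholder ids, and any auth handling are assumptions, not taken from the diff):

```bash
# Sketch only: fetch the stored manifest for one downloaded package version.
curl "http://localhost:8080/main:app_store:sys/manifest?id=<package>:<publisher>&version_hash=<hash>"
```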
@@ -178,21 +178,30 @@ fn handle_message(
            let package_id = req.download_info.package_id;
            let version_hash = req.download_info.version_hash;

            if let Some(package) = state.packages.get(&package_id.clone().to_process_lib()) {
                if package.manifest_hash == Some(manifest_hash) {
                    print_to_terminal(1, "auto_install:main, manifest_hash match");
                    if let Err(e) =
                        utils::install(&package_id, None, &version_hash, state, &our.node)
                    {
                        print_to_terminal(1, &format!("error auto_installing package: {e}"));
                    } else {
                        println!(
                            "auto_installed update for package: {:?}",
                            &package_id.to_process_lib()
                        );
            let process_lib_package_id = package_id.clone().to_process_lib();

            // first, check if we have the package and get its manifest hash
            let should_auto_install = state
                .packages
                .get(&process_lib_package_id)
                .map(|package| package.manifest_hash == Some(manifest_hash.clone()))
                .unwrap_or(false);

            if should_auto_install {
                if let Err(e) =
                    utils::install(&package_id, None, &version_hash, state, &our.node)
                {
                    if let Some(package) = state.packages.get_mut(&process_lib_package_id) {
                        package.pending_update_hash = Some(version_hash);
                    }
                    println!("error auto-installing package: {e}");
                } else {
                    print_to_terminal(1, "auto_install:main, manifest_hash do not match");
                    println!("auto-installed update for package: {process_lib_package_id}");
                }
            } else {
                if let Some(package) = state.packages.get_mut(&process_lib_package_id) {
                    package.pending_update_hash = Some(version_hash);
                    println!("error auto-installing package: manifest hash mismatch");
                }
            }
        }
    }
@@ -54,8 +54,19 @@ pub struct PackageState {
    /// capabilities have changed. if they have changed, auto-install must fail
    /// and the user must approve the new capabilities.
    pub manifest_hash: Option<String>,
    /// stores the version hash of a failed auto-install attempt, which can be
    /// later installed by the user by approving new caps.
    pub pending_update_hash: Option<String>,
}

// this seems cleaner to me right now with pending_update_hash, but given how we serialize
// the state to disk right now, with installed_apis and packages being populated directly
// from the filesystem, not sure I'd like to serialize the whole of this state (maybe separate out the pending one?)
// another option would be to have the download_api recheck the manifest hash? but not sure...
// arbitrary complexity here.

// alternative is main loop doing this, storing it.

/// this process's saved state
pub struct State {
    /// packages we have installed
@@ -122,6 +133,7 @@ impl State {
                    verified: true, // implicitly verified (TODO re-evaluate)
                    caps_approved: false, // must re-approve if you want to do something ??
                    manifest_hash: Some(manifest_hash),
                    pending_update_hash: None, // ... this could be a separate state saved. don't want to reflect this info on-disk as a file.
                },
            );
@@ -225,6 +225,7 @@ pub fn install(
        verified: true, // sideloaded apps are implicitly verified because there is no "source" to verify against
        caps_approved: true, // TODO see if we want to auto-approve local installs
        manifest_hash: Some(manifest_hash),
        pending_update_hash: None, // TODO: doublecheck if problematically overwrites auto_update state.
    };

    if let Ok(extracted) = extract_api(&process_package_id) {
@@ -11,7 +11,7 @@ alloy-primitives = "0.7.6"
alloy-sol-types = "0.7.6"
anyhow = "1.0"
bincode = "1.3.3"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
process_macros = { git = "https://github.com/kinode-dao/process_macros", rev = "626e501" }
rand = "0.8"
serde = { version = "1.0", features = ["derive"] }
@@ -59,7 +59,7 @@ const CHAIN_TIMEOUT: u64 = 60; // 60s
#[cfg(not(feature = "simulation-mode"))]
const KIMAP_ADDRESS: &'static str = kimap::KIMAP_ADDRESS; // optimism
#[cfg(feature = "simulation-mode")]
const KIMAP_ADDRESS: &str = "0xEce71a05B36CA55B895427cD9a440eEF7Cf3669D";
const KIMAP_ADDRESS: &str = "0x9CE8cCD2932DC727c70f9ae4f8C2b68E6Abed58C";

const DELAY_MS: u64 = 1_000; // 1s
@@ -8,7 +8,7 @@ simulation-mode = []

[dependencies]
anyhow = "1.0"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
process_macros = { git = "https://github.com/kinode-dao/process_macros", rev = "626e501" }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
@@ -8,7 +8,7 @@ simulation-mode = []

[dependencies]
anyhow = "1.0"
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
process_macros = { git = "https://github.com/kinode-dao/process_macros", rev = "626e501" }
rand = "0.8"
serde = { version = "1.0", features = ["derive"] }
@ -209,19 +209,32 @@ fn handle_message(
|
||||
desired_version_hash,
|
||||
worker_address: our_worker.to_string(),
|
||||
}))
|
||||
.expects_response(60)
|
||||
.context(&download_request)
|
||||
.send()?;
|
||||
}
|
||||
DownloadRequests::RemoteDownload(download_request) => {
|
||||
// this is a node requesting a download from us.
|
||||
// check if we are mirroring. we should maybe implement some back and forth here.
|
||||
// small handshake for started? but we do not really want to wait for that in this loop..
|
||||
// might be okay. implement.
|
||||
let RemoteDownloadRequest {
|
||||
package_id,
|
||||
desired_version_hash,
|
||||
worker_address,
|
||||
} = download_request;
|
||||
|
||||
let process_lib_package_id = package_id.clone().to_process_lib();
|
||||
|
||||
// check if we are mirroring, if not send back an error.
|
||||
if !state.mirroring.contains(&process_lib_package_id) {
|
||||
let resp = DownloadResponses::Err(DownloadError::NotMirroring);
|
||||
Response::new().body(&resp).send()?;
|
||||
return Ok(()); // return here, todo unify remote and local responses?
|
||||
}
|
||||
|
||||
if !download_zip_exists(&process_lib_package_id, &desired_version_hash) {
|
||||
let resp = DownloadResponses::Err(DownloadError::FileNotFound);
|
||||
Response::new().body(&resp).send()?;
|
||||
return Ok(()); // return here, todo unify remote and local responses?
|
||||
}
|
||||
|
||||
let target_worker = Address::from_str(&worker_address)?;
|
||||
let _ = spawn_send_transfer(
|
||||
our,
|
||||
@ -230,6 +243,8 @@ fn handle_message(
|
||||
APP_SHARE_TIMEOUT,
|
||||
&target_worker,
|
||||
)?;
|
||||
let resp = DownloadResponses::Success;
|
||||
Response::new().body(&resp).send()?;
|
||||
}
|
||||
DownloadRequests::Progress(ref progress) => {
|
||||
// forward progress to main:app-store:sys,
|
||||
@ -428,11 +443,34 @@ fn handle_message(
|
||||
} else {
|
||||
match message.body().try_into()? {
|
||||
Resp::Download(download_response) => {
|
||||
// these are handled in line.
|
||||
print_to_terminal(
|
||||
1,
|
||||
&format!("got a weird download response: {:?}", download_response),
|
||||
);
|
||||
// get context of the response.
|
||||
// handled are errors or ok responses from a remote node.
|
||||
|
||||
if let Some(context) = message.context() {
|
||||
let download_request = serde_json::from_slice::<LocalDownloadRequest>(context)?;
|
||||
match download_response {
|
||||
DownloadResponses::Err(e) => {
|
||||
Request::to(("our", "main", "app_store", "sys"))
|
||||
.body(DownloadCompleteRequest {
|
||||
package_id: download_request.package_id.clone(),
|
||||
version_hash: download_request.desired_version_hash.clone(),
|
||||
err: Some(e),
|
||||
})
|
||||
.send()?;
|
||||
}
|
||||
DownloadResponses::Success => {
|
||||
// todo: maybe we do something here.
|
||||
print_to_terminal(
|
||||
1,
|
||||
&format!(
|
||||
"downloads: got success response from remote node: {:?}",
|
||||
download_request
|
||||
),
|
||||
);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
Resp::HttpClient(resp) => {
|
||||
let Some(context) = message.context() else {
|
||||
@ -575,6 +613,22 @@ fn extract_and_write_manifest(file_contents: &[u8], manifest_path: &str) -> anyh
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Check if a download zip exists for a given package and version hash.
|
||||
/// Used to check if we can share a package or not!
|
||||
fn download_zip_exists(package_id: &PackageId, version_hash: &str) -> bool {
|
||||
let filename = format!(
|
||||
"/app_store:sys/downloads/{}:{}/{}.zip",
|
||||
package_id.package_name,
|
||||
package_id.publisher(),
|
||||
version_hash
|
||||
);
|
||||
let res = vfs::metadata(&filename, None);
|
||||
match res {
|
||||
Ok(meta) => meta.file_type == vfs::FileType::File,
|
||||
Err(_e) => false,
|
||||
}
|
||||
}
|
||||
|
||||
fn get_manifest_hash(package_id: PackageId, version_hash: String) -> anyhow::Result<String> {
|
||||
let package_dir = format!("{}/{}", "/app-store:sys/downloads", package_id.to_string());
|
||||
let manifest_path = format!("{}/{}.json", package_dir, version_hash);
|
||||
|
@ -9,7 +9,7 @@ simulation-mode = []
|
||||
[dependencies]
|
||||
anyhow = "1.0"
|
||||
bincode = "1.3.3"
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
|
||||
process_macros = { git = "https://github.com/kinode-dao/process_macros", rev = "626e501" }
|
||||
rand = "0.8"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
|
@ -78,6 +78,7 @@ fn init(our: Address) {
|
||||
}
|
||||
|
||||
// killswitch timer, 2 minutes. sender or receiver gets killed/cleaned up.
|
||||
// TODO: killswitch update bubbles up to downloads process?
|
||||
timer::set_timer(120000, None);
|
||||
|
||||
let start = std::time::Instant::now();
|
||||
@ -167,7 +168,11 @@ fn handle_receiver(
|
||||
package_id: &PackageId,
|
||||
version_hash: &str,
|
||||
) -> anyhow::Result<()> {
|
||||
// TODO: write to a temporary location first, then check hash as we go, then rename to final location.
|
||||
let timer_address = Address::from_str("our@timer:distro:sys")?;
|
||||
|
||||
let mut file: Option<File> = None;
|
||||
let mut size: Option<u64> = None;
|
||||
let mut hasher = Sha256::new();
|
||||
|
||||
let package_dir = vfs::open_dir(
|
||||
&format!(
|
||||
@ -179,16 +184,6 @@ fn handle_receiver(
|
||||
None,
|
||||
)?;
|
||||
|
||||
let timer_address = Address::from_str("our@timer:distro:sys")?;
|
||||
|
||||
let mut file = vfs::open_file(
|
||||
&format!("{}{}.zip", &package_dir.path, version_hash),
|
||||
true,
|
||||
None,
|
||||
)?;
|
||||
let mut size: Option<u64> = None;
|
||||
let mut hasher = Sha256::new();
|
||||
|
||||
loop {
|
||||
let message = await_message()?;
|
||||
if *message.source() == timer_address {
|
||||
@ -200,7 +195,28 @@ fn handle_receiver(
|
||||
|
||||
match message.body().try_into()? {
|
||||
DownloadRequests::Chunk(chunk) => {
|
||||
handle_chunk(&mut file, &chunk, parent_process, &mut size, &mut hasher)?;
|
||||
let bytes = if let Some(blob) = get_blob() {
|
||||
blob.bytes
|
||||
} else {
|
||||
return Err(anyhow::anyhow!("ft_worker: got no blob in chunk request"));
|
||||
};
|
||||
|
||||
if file.is_none() {
|
||||
file = Some(vfs::open_file(
|
||||
&format!("{}{}.zip", &package_dir.path, version_hash),
|
||||
true,
|
||||
None,
|
||||
)?);
|
||||
}
|
||||
|
||||
handle_chunk(
|
||||
file.as_mut().unwrap(),
|
||||
&chunk,
|
||||
parent_process,
|
||||
&mut size,
|
||||
&mut hasher,
|
||||
&bytes,
|
||||
)?;
|
||||
if let Some(s) = size {
|
||||
if chunk.offset + chunk.length >= s {
|
||||
let recieved_hash = format!("{:x}", hasher.finalize());
|
||||
@ -232,7 +248,7 @@ fn handle_receiver(
|
||||
let manifest_filename =
|
||||
format!("{}{}.json", package_dir.path, version_hash);
|
||||
|
||||
let contents = file.read()?;
|
||||
let contents = file.as_mut().unwrap().read()?;
|
||||
extract_and_write_manifest(&contents, &manifest_filename)?;
|
||||
|
||||
Request::new()
|
||||
@ -292,15 +308,10 @@ fn handle_chunk(
|
||||
parent: &Address,
|
||||
size: &mut Option<u64>,
|
||||
hasher: &mut Sha256,
|
||||
bytes: &[u8],
|
||||
) -> anyhow::Result<()> {
|
||||
let bytes = if let Some(blob) = get_blob() {
|
||||
blob.bytes
|
||||
} else {
|
||||
return Err(anyhow::anyhow!("ft_worker: got no blob"));
|
||||
};
|
||||
|
||||
file.write_all(&bytes)?;
|
||||
hasher.update(&bytes);
|
||||
file.write_all(bytes)?;
|
||||
hasher.update(bytes);
|
||||
|
||||
if let Some(total_size) = size {
|
||||
// let progress = ((chunk.offset + chunk.length) as f64 / *total_size as f64 * 100.0) as u64;
|
||||
|
@ -8,7 +8,7 @@ simulation-mode = []
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1.0"
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
|
||||
process_macros = { git = "https://github.com/kinode-dao/process_macros", rev = "626e501" }
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
|
@ -2,13 +2,13 @@ import React from "react";
|
||||
import { BrowserRouter as Router, Route, Routes } from "react-router-dom";
|
||||
|
||||
import Header from "./components/Header";
|
||||
import { APP_DETAILS_PATH, DOWNLOAD_PATH, MY_DOWNLOADS_PATH, PUBLISH_PATH, STORE_PATH } from "./constants/path";
|
||||
import { APP_DETAILS_PATH, DOWNLOAD_PATH, MY_APPS_PATH, PUBLISH_PATH, STORE_PATH } from "./constants/path";
|
||||
|
||||
import StorePage from "./pages/StorePage";
|
||||
import AppPage from "./pages/AppPage";
|
||||
import DownloadPage from "./pages/DownloadPage";
|
||||
import PublishPage from "./pages/PublishPage";
|
||||
import MyDownloadsPage from "./pages/MyDownloadsPage";
|
||||
import MyAppsPage from "./pages/MyAppsPage";
|
||||
|
||||
|
||||
const BASE_URL = import.meta.env.BASE_URL;
|
||||
@ -22,7 +22,7 @@ function App() {
|
||||
<Header />
|
||||
<Routes>
|
||||
<Route path={STORE_PATH} element={<StorePage />} />
|
||||
<Route path={MY_DOWNLOADS_PATH} element={<MyDownloadsPage />} />
|
||||
<Route path={MY_APPS_PATH} element={<MyAppsPage />} />
|
||||
<Route path={`${APP_DETAILS_PATH}/:id`} element={<AppPage />} />
|
||||
<Route path={PUBLISH_PATH} element={<PublishPage />} />
|
||||
<Route path={`${DOWNLOAD_PATH}/:id`} element={<DownloadPage />} />
|
||||
|
@ -1,27 +1,28 @@
|
||||
import React from 'react';
|
||||
import { Link } from 'react-router-dom';
|
||||
import { STORE_PATH, PUBLISH_PATH, MY_DOWNLOADS_PATH } from '../constants/path';
|
||||
import { STORE_PATH, PUBLISH_PATH, MY_APPS_PATH } from '../constants/path';
|
||||
import { ConnectButton } from '@rainbow-me/rainbowkit';
|
||||
import { FaHome } from "react-icons/fa";
|
||||
import NotificationBay from './NotificationBay';
|
||||
|
||||
const Header: React.FC = () => {
|
||||
return (
|
||||
<header className="app-header">
|
||||
<div className="header-left">
|
||||
<nav>
|
||||
<button onClick={() => window.location.href = '/'}>
|
||||
<button onClick={() => window.location.href = window.location.origin.replace('//app-store-sys.', '//') + '/'} className="home-button">
|
||||
<FaHome />
|
||||
</button>
|
||||
<Link to={STORE_PATH} className={location.pathname === STORE_PATH ? 'active' : ''}>Apps</Link>
|
||||
<Link to={PUBLISH_PATH} className={location.pathname === PUBLISH_PATH ? 'active' : ''}>Publish</Link>
|
||||
<Link to={MY_DOWNLOADS_PATH} className={location.pathname === MY_DOWNLOADS_PATH ? 'active' : ''}>My Downloads</Link>
|
||||
<Link to={MY_APPS_PATH} className={location.pathname === MY_APPS_PATH ? 'active' : ''}>My Apps</Link>
|
||||
</nav>
|
||||
</div>
|
||||
<div className="header-right">
|
||||
<NotificationBay />
|
||||
<ConnectButton />
|
||||
</div>
|
||||
</header>
|
||||
);
|
||||
};
|
||||
|
||||
export default Header;
|
@ -0,0 +1,97 @@
|
||||
import React, { useState } from 'react';
|
||||
import { ManifestResponse, PackageManifestEntry } from '../types/Apps';
|
||||
import { FaChevronDown, FaChevronRight, FaGlobe, FaLock, FaShieldAlt } from 'react-icons/fa';
|
||||
|
||||
interface ManifestDisplayProps {
|
||||
manifestResponse: ManifestResponse;
|
||||
}
|
||||
|
||||
const capabilityMap: Record<string, string> = {
|
||||
'vfs:distro:sys': 'Virtual Filesystem',
|
||||
'http_client:distro:sys': 'HTTP Client',
|
||||
'http_server:distro:sys': 'HTTP Server',
|
||||
'eth:distro:sys': 'Ethereum RPC access',
|
||||
'homepage:homepage:sys': 'Ability to add itself to homepage',
|
||||
'main:app_store:sys': 'App Store',
|
||||
'chain:app_store:sys': 'Chain',
|
||||
'terminal:terminal:sys': 'Terminal',
|
||||
};
|
||||
|
||||
// note: we can do some future regex magic mapping here too!
|
||||
// if includes("root") return WARNING
|
||||
const transformCapabilities = (capabilities: any[]) => {
|
||||
return capabilities.map(cap => capabilityMap[cap] || cap);
|
||||
};
|
||||
|
||||
|
||||
const ProcessManifest: React.FC<{ manifest: PackageManifestEntry }> = ({ manifest }) => {
|
||||
const [isExpanded, setIsExpanded] = useState(false);
|
||||
const hasCapabilities = manifest.request_capabilities.length > 0 || manifest.grant_capabilities.length > 0;
|
||||
|
||||
return (
|
||||
<div className="process-manifest">
|
||||
<button
|
||||
className="process-header"
|
||||
onClick={() => setIsExpanded(!isExpanded)}
|
||||
>
|
||||
{isExpanded ? <FaChevronDown /> : <FaChevronRight />}
|
||||
<span className="process-name">{manifest.process_name}</span>
|
||||
<div className="process-indicators">
|
||||
{manifest.request_networking && (
|
||||
<FaGlobe title="Requests Network Access" className="network-icon" />
|
||||
)}
|
||||
{hasCapabilities && (
|
||||
<FaShieldAlt title="Has Capability Requirements" className="capability-icon" />
|
||||
)}
|
||||
{!manifest.public && (
|
||||
<FaLock title="Private Process" className="private-icon" />
|
||||
)}
|
||||
</div>
|
||||
</button>
|
||||
|
||||
{isExpanded && (
|
||||
<div className="process-details">
|
||||
{manifest.request_capabilities.length > 0 && (
|
||||
<div className="capability-section">
|
||||
<h4>Requested Capabilities:</h4>
|
||||
<ul>
|
||||
{transformCapabilities(manifest.request_capabilities).map((cap, i) => (
|
||||
<li key={i}>{cap}</li>
|
||||
))}
|
||||
</ul>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{manifest.grant_capabilities.length > 0 && (
|
||||
<div className="capability-section">
|
||||
<h4>Granted Capabilities:</h4>
|
||||
<ul>
|
||||
{transformCapabilities(manifest.grant_capabilities).map((cap, i) => (
|
||||
<li key={i}>{cap}</li>
|
||||
))}
|
||||
</ul>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
const ManifestDisplay: React.FC<ManifestDisplayProps> = ({ manifestResponse }) => {
|
||||
if (!manifestResponse) {
|
||||
return <p>No manifest data available.</p>;
|
||||
}
|
||||
|
||||
const parsedManifests: PackageManifestEntry[] = JSON.parse(manifestResponse.manifest);
|
||||
|
||||
return (
|
||||
<div className="manifest-display">
|
||||
{parsedManifests.map((manifest, index) => (
|
||||
<ProcessManifest key={index} manifest={manifest} />
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default ManifestDisplay;
|
140
kinode/packages/app-store/ui/src/components/NotificationBay.tsx
Normal file
140
kinode/packages/app-store/ui/src/components/NotificationBay.tsx
Normal file
@ -0,0 +1,140 @@
|
||||
import React, { ReactNode, useState } from 'react';
|
||||
import { FaBell, FaChevronDown, FaChevronUp, FaTrash, FaTimes } from 'react-icons/fa';
|
||||
import useAppsStore from '../store';
|
||||
import { Notification, NotificationAction } from '../types/Apps';
|
||||
import { useNavigate } from 'react-router-dom';
|
||||
|
||||
|
||||
interface ModalProps {
|
||||
children: ReactNode;
|
||||
onClose: () => void;
|
||||
}
|
||||
|
||||
const Modal: React.FC<ModalProps> = ({ children, onClose }) => {
|
||||
return (
|
||||
<div className="modal-overlay">
|
||||
<div className="modal-content">
|
||||
<button className="modal-close" onClick={onClose}>
|
||||
<FaTimes />
|
||||
</button>
|
||||
{children}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
const NotificationBay: React.FC = () => {
|
||||
const { notifications, removeNotification } = useAppsStore();
|
||||
const hasErrors = notifications.some(n => n.type === 'error');
|
||||
const [isExpanded, setIsExpanded] = useState(false);
|
||||
const [modalContent, setModalContent] = useState<React.ReactNode | null>(null);
|
||||
const navigate = useNavigate();
|
||||
|
||||
const handleActionClick = (action: NotificationAction) => {
|
||||
switch (action.action.type) {
|
||||
case 'modal':
|
||||
const content = typeof action.action.modalContent === 'function'
|
||||
? action.action.modalContent()
|
||||
: action.action.modalContent;
|
||||
setModalContent(content);
|
||||
break;
|
||||
case 'click':
|
||||
action.action.onClick?.();
|
||||
break;
|
||||
case 'redirect':
|
||||
if (action.action.path) {
|
||||
navigate(action.action.path);
|
||||
}
|
||||
break;
|
||||
}
|
||||
};
|
||||
|
||||
const handleDismiss = (notificationId: string, event: React.MouseEvent) => {
|
||||
event.stopPropagation(); // Prevent event bubbling
|
||||
removeNotification(notificationId);
|
||||
};
|
||||
|
||||
const renderNotification = (notification: Notification) => {
|
||||
return (
|
||||
<div key={notification.id} className={`notification-item ${notification.type}`}>
|
||||
{notification.renderContent ? (
|
||||
notification.renderContent(notification)
|
||||
) : (
|
||||
<>
|
||||
<div className="notification-content">
|
||||
<p>{notification.message}</p>
|
||||
{notification.type === 'download' && notification.metadata?.progress && (
|
||||
<div className="progress-bar">
|
||||
<div
|
||||
className="progress"
|
||||
style={{ width: `${notification.metadata.progress}%` }}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{notification.actions && (
|
||||
<div className="notification-actions">
|
||||
{notification.actions.map((action, index) => (
|
||||
<button
|
||||
key={index}
|
||||
onClick={() => handleActionClick(action)}
|
||||
className={`action-button ${action.variant || 'secondary'}`}
|
||||
>
|
||||
{action.icon && <action.icon />}
|
||||
{action.label}
|
||||
</button>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{!notification.persistent && (
|
||||
<button
|
||||
className="dismiss-button"
|
||||
onClick={(e) => handleDismiss(notification.id, e)}
|
||||
>
|
||||
<FaTrash />
|
||||
</button>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
return (
|
||||
<>
|
||||
<div className="notification-bay">
|
||||
<button
|
||||
onClick={() => setIsExpanded(!isExpanded)}
|
||||
className={`notification-button ${hasErrors ? 'has-errors' : ''}`}
|
||||
>
|
||||
<FaBell />
|
||||
{notifications.length > 0 && (
|
||||
<span className={`badge ${hasErrors ? 'error-badge' : ''}`}>
|
||||
{notifications.length}
|
||||
</span>
|
||||
)}
|
||||
</button>
|
||||
|
||||
{isExpanded && (
|
||||
<div className="notification-details">
|
||||
{notifications.length === 0 ? (
|
||||
<p>All clear, no notifications!</p>
|
||||
) : (
|
||||
notifications.map(renderNotification)
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{modalContent && (
|
||||
<Modal onClose={() => setModalContent(null)}>
|
||||
{modalContent}
|
||||
</Modal>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
export default NotificationBay;
|
@ -1,3 +1,5 @@
|
||||
export { default as Header } from './Header';
|
||||
export { default as MirrorSelector } from './MirrorSelector';
|
||||
export { default as PackageSelector } from './PackageSelector';
|
||||
export { default as PackageSelector } from './PackageSelector';
|
||||
export { default as ManifestDisplay } from './ManifestDisplay';
|
||||
export { default as NotificationBay } from './NotificationBay';
|
@ -2,4 +2,4 @@ export const STORE_PATH = '/';
|
||||
export const PUBLISH_PATH = '/publish';
|
||||
export const APP_DETAILS_PATH = '/app';
|
||||
export const DOWNLOAD_PATH = '/download';
|
||||
export const MY_DOWNLOADS_PATH = '/my-downloads';
|
||||
export const MY_APPS_PATH = '/my-apps';
|
||||
|
@ -49,6 +49,13 @@ a:hover {
|
||||
gap: 1rem;
|
||||
}
|
||||
|
||||
.header-right {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 1rem;
|
||||
/* Provides consistent spacing between NotificationBay and ConnectButton */
|
||||
}
|
||||
|
||||
.header-left h1 {
|
||||
margin: 0;
|
||||
font-size: 1.5rem;
|
||||
@ -136,6 +143,8 @@ td {
|
||||
.app-icon {
|
||||
width: 64px;
|
||||
height: 64px;
|
||||
min-width: 64px;
|
||||
min-height: 64px;
|
||||
object-fit: cover;
|
||||
border-radius: var(--border-radius);
|
||||
}
|
||||
@ -348,6 +357,13 @@ td {
|
||||
padding-bottom: 1rem;
|
||||
}
|
||||
|
||||
.home-button {
|
||||
min-width: 48px;
|
||||
min-height: 48px;
|
||||
width: 48px;
|
||||
height: 48px;
|
||||
}
|
||||
|
||||
.app-screenshot {
|
||||
max-width: 200px;
|
||||
height: auto;
|
||||
@ -423,4 +439,278 @@ td {
|
||||
|
||||
.fa-spin {
|
||||
animation: spin 1s linear infinite;
|
||||
}
|
||||
|
||||
.manifest-display {
|
||||
background: light-dark(var(--white), var(--tasteful-dark));
|
||||
border-radius: var(--border-radius);
|
||||
padding: 1rem;
|
||||
max-width: 600px;
|
||||
}
|
||||
|
||||
.process-manifest {
|
||||
margin-bottom: 0.5rem;
|
||||
border: 1px solid light-dark(var(--gray), var(--off-black));
|
||||
border-radius: var(--border-radius);
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.process-header {
|
||||
width: 100%;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0.5rem;
|
||||
padding: 0.75rem 1rem;
|
||||
background: none;
|
||||
border: none;
|
||||
cursor: pointer;
|
||||
color: light-dark(var(--off-black), var(--off-white));
|
||||
transition: background-color 0.2s;
|
||||
}
|
||||
|
||||
.process-header:hover {
|
||||
background: light-dark(var(--tan), var(--off-black));
|
||||
}
|
||||
|
||||
.process-name {
|
||||
flex: 1;
|
||||
text-align: left;
|
||||
font-weight: 500;
|
||||
}
|
||||
|
||||
.process-indicators {
|
||||
display: flex;
|
||||
gap: 0.5rem;
|
||||
color: light-dark(var(--gray), var(--off-white));
|
||||
}
|
||||
|
||||
.network-icon {
|
||||
color: var(--orange);
|
||||
}
|
||||
|
||||
.capability-icon {
|
||||
color: var(--blue);
|
||||
}
|
||||
|
||||
.private-icon {
|
||||
color: var(--gray);
|
||||
}
|
||||
|
||||
.process-details {
|
||||
padding: 1rem;
|
||||
background: light-dark(var(--tan), var(--off-black));
|
||||
border-top: 1px solid light-dark(var(--gray), var(--off-black));
|
||||
}
|
||||
|
||||
.capability-section {
|
||||
margin-bottom: 1rem;
|
||||
}
|
||||
|
||||
.capability-section:last-child {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
.capability-section h4 {
|
||||
margin: 0 0 0.5rem 0;
|
||||
color: light-dark(var(--off-black), var(--off-white));
|
||||
}
|
||||
|
||||
.capability-section ul {
|
||||
margin: 0;
|
||||
padding-left: 1.5rem;
|
||||
color: light-dark(var(--gray), var(--off-white));
|
||||
}
|
||||
|
||||
.capability-section li {
|
||||
margin-bottom: 0.25rem;
|
||||
}
|
||||
|
||||
.notification-bay {
|
||||
position: relative;
|
||||
margin-right: 1rem;
|
||||
}
|
||||
|
||||
.notification-button {
|
||||
background: none;
|
||||
border: none;
|
||||
cursor: pointer;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0.5rem;
|
||||
padding: 0.5rem;
|
||||
color: light-dark(var(--off-black), var(--off-white));
|
||||
}
|
||||
|
||||
.notification-details {
|
||||
position: absolute;
|
||||
top: 100%;
|
||||
right: 0;
|
||||
width: 320px;
|
||||
max-height: 400px;
|
||||
overflow-y: auto;
|
||||
background-color: light-dark(var(--white), var(--tasteful-dark));
|
||||
border-radius: var(--border-radius);
|
||||
box-shadow: 0 2px 10px rgba(0, 0, 0, 0.1);
|
||||
z-index: 1000;
|
||||
padding: 0.5rem;
|
||||
}
|
||||
|
||||
.badge {
|
||||
background-color: var(--orange);
|
||||
color: var(--white);
|
||||
border-radius: 50%;
|
||||
padding: 0.25rem 0.5rem;
|
||||
font-size: 0.75rem;
|
||||
min-width: 1.5rem;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.notification-item {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
padding: 1rem;
|
||||
margin: 0.5rem 0;
|
||||
border-radius: var(--border-radius);
|
||||
background-color: light-dark(var(--tan), var(--off-black));
|
||||
color: light-dark(var(--off-black), var(--off-white));
|
||||
}
|
||||
|
||||
.notification-item.error {
|
||||
background-color: light-dark(#ffe6e6, #4a2020);
|
||||
}
|
||||
|
||||
.notification-item.success {
|
||||
background-color: light-dark(#e6ffe6, #204a20);
|
||||
}
|
||||
|
||||
.notification-item.warning {
|
||||
background-color: light-dark(#fff3e6, #4a3820);
|
||||
}
|
||||
|
||||
.notification-item.download {
|
||||
background-color: light-dark(#e6f3ff, #20304a);
|
||||
}
|
||||
|
||||
.notification-content {
|
||||
flex: 1;
|
||||
margin-right: 1rem;
|
||||
}
|
||||
|
||||
.notification-actions {
|
||||
display: flex;
|
||||
gap: 0.5rem;
|
||||
margin-left: auto;
|
||||
}
|
||||
|
||||
.dismiss-button {
|
||||
background: none;
|
||||
border: none;
|
||||
cursor: pointer;
|
||||
color: light-dark(var(--gray), var(--off-white));
|
||||
padding: 0.25rem;
|
||||
}
|
||||
|
||||
.dismiss-button:hover {
|
||||
color: var(--orange);
|
||||
}
|
||||
|
||||
.progress-bar {
|
||||
margin-top: 0.5rem;
|
||||
height: 4px;
|
||||
background-color: light-dark(var(--white), var(--off-black));
|
||||
border-radius: 2px;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.progress {
|
||||
height: 100%;
|
||||
background-color: var(--orange);
|
||||
transition: width 0.3s ease;
|
||||
}
|
||||
|
||||
/* Modal styles */
|
||||
.modal-overlay {
|
||||
position: fixed;
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
background-color: rgba(0, 0, 0, 0.5);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
z-index: 1100;
|
||||
}
|
||||
|
||||
.modal-content {
|
||||
background-color: light-dark(var(--white), var(--tasteful-dark));
|
||||
color: light-dark(var(--off-black), var(--off-white));
|
||||
padding: 1.5rem;
|
||||
border-radius: var(--border-radius);
|
||||
position: relative;
|
||||
max-width: 80%;
|
||||
max-height: 80vh;
|
||||
overflow-y: auto;
|
||||
}
|
||||
|
||||
.modal-close {
|
||||
position: absolute;
|
||||
top: 0.75rem;
|
||||
right: 0.75rem;
|
||||
background: none;
|
||||
border: none;
|
||||
cursor: pointer;
|
||||
color: light-dark(var(--gray), var(--off-white));
|
||||
padding: 0.25rem;
|
||||
}
|
||||
|
||||
.modal-close:hover {
|
||||
color: var(--orange);
|
||||
}
|
||||
|
||||
.notification-button.has-errors {
|
||||
animation: shake 0.82s cubic-bezier(.36, .07, .19, .97) both;
|
||||
}
|
||||
|
||||
.badge.error-badge {
|
||||
background-color: var(--error-red);
|
||||
animation: pulse 2s infinite;
|
||||
}
|
||||
|
||||
@keyframes shake {
|
||||
|
||||
10%,
|
||||
90% {
|
||||
transform: translate3d(-1px, 0, 0);
|
||||
}
|
||||
|
||||
20%,
|
||||
80% {
|
||||
transform: translate3d(2px, 0, 0);
|
||||
}
|
||||
|
||||
30%,
|
||||
50%,
|
||||
70% {
|
||||
transform: translate3d(-4px, 0, 0);
|
||||
}
|
||||
|
||||
40%,
|
||||
60% {
|
||||
transform: translate3d(4px, 0, 0);
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes pulse {
|
||||
0% {
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
50% {
|
||||
opacity: 0.6;
|
||||
}
|
||||
|
||||
100% {
|
||||
opacity: 1;
|
||||
}
|
||||
}
|
@ -62,7 +62,7 @@ export default function AppPage() {
|
||||
if (app) {
|
||||
const launchUrl = getLaunchUrl(`${app.package_id.package_name}:${app.package_id.publisher_node}`);
|
||||
if (launchUrl) {
|
||||
window.location.href = launchUrl;
|
||||
window.location.href = window.location.origin.replace('//app-store-sys.', '//') + launchUrl;
|
||||
}
|
||||
}
|
||||
}, [app, getLaunchUrl]);
|
||||
|
@ -2,7 +2,8 @@ import React, { useState, useEffect, useCallback, useMemo } from "react";
|
||||
import { useParams } from "react-router-dom";
|
||||
import { FaDownload, FaSpinner, FaChevronDown, FaChevronUp, FaRocket, FaTrash, FaPlay } from "react-icons/fa";
|
||||
import useAppsStore from "../store";
|
||||
import { MirrorSelector } from '../components';
|
||||
import { MirrorSelector, ManifestDisplay } from '../components';
|
||||
import { ManifestResponse } from "../types/Apps";
|
||||
|
||||
export default function DownloadPage() {
|
||||
const { id } = useParams<{ id: string }>();
|
||||
@ -17,7 +18,7 @@ export default function DownloadPage() {
|
||||
removeDownload,
|
||||
clearAllActiveDownloads,
|
||||
fetchHomepageApps,
|
||||
getLaunchUrl
|
||||
getLaunchUrl,
|
||||
} = useAppsStore();
|
||||
|
||||
const [showMetadata, setShowMetadata] = useState(false);
|
||||
@ -26,7 +27,7 @@ export default function DownloadPage() {
|
||||
const [showMyDownloads, setShowMyDownloads] = useState(false);
|
||||
const [isMirrorOnline, setIsMirrorOnline] = useState<boolean | null>(null);
|
||||
const [showCapApproval, setShowCapApproval] = useState(false);
|
||||
const [manifest, setManifest] = useState<any>(null);
|
||||
const [manifestResponse, setManifestResponse] = useState<ManifestResponse | null>(null);
|
||||
const [isInstalling, setIsInstalling] = useState(false);
|
||||
const [isCheckingLaunch, setIsCheckingLaunch] = useState(false);
|
||||
const [launchPath, setLaunchPath] = useState<string | null>(null);
|
||||
@ -151,8 +152,12 @@ export default function DownloadPage() {
|
||||
const download = appDownloads.find(d => d.File && d.File.name === `${hash}.zip`);
|
||||
if (download?.File?.manifest) {
|
||||
try {
|
||||
const manifestData = JSON.parse(download.File.manifest);
|
||||
setManifest(manifestData);
|
||||
const manifest_response: ManifestResponse = {
|
||||
package_id: app.package_id,
|
||||
version_hash: hash,
|
||||
manifest: download.File.manifest
|
||||
};
|
||||
setManifestResponse(manifest_response);
|
||||
setShowCapApproval(true);
|
||||
} catch (error) {
|
||||
console.error('Failed to parse manifest:', error);
|
||||
@ -170,7 +175,7 @@ export default function DownloadPage() {
|
||||
setLaunchPath(null);
|
||||
installApp(id, versionData.hash).then(() => {
|
||||
setShowCapApproval(false);
|
||||
setManifest(null);
|
||||
setManifestResponse(null);
|
||||
fetchData(id);
|
||||
});
|
||||
}
|
||||
@ -178,7 +183,7 @@ export default function DownloadPage() {
|
||||
|
||||
const handleLaunch = useCallback(() => {
|
||||
if (launchPath) {
|
||||
window.location.href = launchPath;
|
||||
window.location.href = window.location.origin.replace('//app-store-sys.', '//') + launchPath;
|
||||
}
|
||||
}, [launchPath]);
|
||||
|
||||
@ -337,13 +342,11 @@ export default function DownloadPage() {
|
||||
)}
|
||||
</div>
|
||||
|
||||
{showCapApproval && manifest && (
|
||||
{showCapApproval && manifestResponse && (
|
||||
<div className="cap-approval-popup">
|
||||
<div className="cap-approval-content">
|
||||
<h3>Approve Capabilities</h3>
|
||||
<pre className="json-display">
|
||||
{JSON.stringify(manifest[0]?.request_capabilities || [], null, 2)}
|
||||
</pre>
|
||||
<ManifestDisplay manifestResponse={manifestResponse} />
|
||||
<div className="approval-buttons">
|
||||
<button onClick={() => setShowCapApproval(false)}>Cancel</button>
|
||||
<button onClick={confirmInstall}>
|
||||
@ -354,7 +357,6 @@ export default function DownloadPage() {
|
||||
</div>
|
||||
)}
|
||||
|
||||
|
||||
<div className="app-details">
|
||||
<h3>App Details</h3>
|
||||
<button onClick={() => setShowMetadata(!showMetadata)}>
|
||||
|
341
kinode/packages/app-store/ui/src/pages/MyAppsPage.tsx
Normal file
341
kinode/packages/app-store/ui/src/pages/MyAppsPage.tsx
Normal file
@ -0,0 +1,341 @@
|
||||
import React, { useState, useEffect } from "react";
|
||||
import { FaFolder, FaFile, FaChevronLeft, FaSync, FaRocket, FaSpinner, FaCheck, FaTrash } from "react-icons/fa";
|
||||
import useAppsStore from "../store";
|
||||
import { DownloadItem, PackageManifest, PackageState } from "../types/Apps";
|
||||
|
||||
// Core packages that cannot be uninstalled
|
||||
const CORE_PACKAGES = [
|
||||
"app_store:sys",
|
||||
"contacts:sys",
|
||||
"kino_updates:sys",
|
||||
"terminal:sys",
|
||||
"chess:sys",
|
||||
"kns_indexer:sys",
|
||||
"settings:sys",
|
||||
"homepage:sys"
|
||||
];
|
||||
|
||||
export default function MyAppsPage() {
|
||||
const {
|
||||
fetchDownloads,
|
||||
fetchDownloadsForApp,
|
||||
startMirroring,
|
||||
stopMirroring,
|
||||
installApp,
|
||||
removeDownload,
|
||||
fetchInstalled,
|
||||
installed,
|
||||
uninstallApp
|
||||
} = useAppsStore();
|
||||
|
||||
const [currentPath, setCurrentPath] = useState<string[]>([]);
|
||||
const [items, setItems] = useState<DownloadItem[]>([]);
|
||||
const [isInstalling, setIsInstalling] = useState(false);
|
||||
const [isUninstalling, setIsUninstalling] = useState(false);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [showCapApproval, setShowCapApproval] = useState(false);
|
||||
const [manifest, setManifest] = useState<PackageManifest | null>(null);
|
||||
const [selectedItem, setSelectedItem] = useState<DownloadItem | null>(null);
|
||||
const [showUninstallConfirm, setShowUninstallConfirm] = useState(false);
|
||||
const [appToUninstall, setAppToUninstall] = useState<any>(null);
|
||||
|
||||
useEffect(() => {
|
||||
loadItems();
|
||||
fetchInstalled();
|
||||
}, [currentPath]);
|
||||
|
||||
const loadItems = async () => {
|
||||
try {
|
||||
let downloads: DownloadItem[];
|
||||
if (currentPath.length === 0) {
|
||||
downloads = await fetchDownloads();
|
||||
} else {
|
||||
downloads = await fetchDownloadsForApp(currentPath.join(':'));
|
||||
}
|
||||
setItems(downloads);
|
||||
} catch (error) {
|
||||
console.error("Error loading items:", error);
|
||||
setError(`Error loading items: ${error instanceof Error ? error.message : String(error)}`);
|
||||
}
|
||||
};
|
||||
|
||||
const initiateUninstall = (app: any) => {
|
||||
const packageId = `${app.package_id.package_name}:${app.package_id.publisher_node}`;
|
||||
if (CORE_PACKAGES.includes(packageId)) {
|
||||
setError("Cannot uninstall core system packages");
|
||||
return;
|
||||
}
|
||||
setAppToUninstall(app);
|
||||
setShowUninstallConfirm(true);
|
||||
};
|
||||
|
||||
const handleUninstall = async () => {
|
||||
if (!appToUninstall) return;
|
||||
setIsUninstalling(true);
|
||||
const packageId = `${appToUninstall.package_id.package_name}:${appToUninstall.package_id.publisher_node}`;
|
||||
try {
|
||||
await uninstallApp(packageId);
|
||||
await fetchInstalled();
|
||||
await loadItems();
|
||||
setShowUninstallConfirm(false);
|
||||
setAppToUninstall(null);
|
||||
} catch (error) {
|
||||
console.error('Uninstallation failed:', error);
|
||||
setError(`Uninstallation failed: ${error instanceof Error ? error.message : String(error)}`);
|
||||
} finally {
|
||||
setIsUninstalling(false);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
const navigateToItem = (item: DownloadItem) => {
|
||||
if (item.Dir) {
|
||||
setCurrentPath([...currentPath, item.Dir.name]);
|
||||
}
|
||||
};
|
||||
|
||||
const navigateUp = () => {
|
||||
setCurrentPath(currentPath.slice(0, -1));
|
||||
};
|
||||
|
||||
const toggleMirroring = async (item: DownloadItem) => {
|
||||
if (item.Dir) {
|
||||
const packageId = [...currentPath, item.Dir.name].join(':');
|
||||
try {
|
||||
if (item.Dir.mirroring) {
|
||||
await stopMirroring(packageId);
|
||||
} else {
|
||||
await startMirroring(packageId);
|
||||
}
|
||||
await loadItems();
|
||||
} catch (error) {
|
||||
console.error("Error toggling mirroring:", error);
|
||||
setError(`Error toggling mirroring: ${error instanceof Error ? error.message : String(error)}`);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const handleInstall = async (item: DownloadItem) => {
|
||||
if (item.File) {
|
||||
setSelectedItem(item);
|
||||
try {
|
||||
const manifestData = JSON.parse(item.File.manifest);
|
||||
setManifest(manifestData);
|
||||
setShowCapApproval(true);
|
||||
} catch (error) {
|
||||
console.error('Failed to parse manifest:', error);
|
||||
setError(`Failed to parse manifest: ${error instanceof Error ? error.message : String(error)}`);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const confirmInstall = async () => {
|
||||
if (!selectedItem?.File) return;
|
||||
setIsInstalling(true);
|
||||
setError(null);
|
||||
try {
|
||||
const fileName = selectedItem.File.name;
|
||||
const parts = fileName.split(':');
|
||||
const versionHash = parts.pop()?.replace('.zip', '');
|
||||
|
||||
if (!versionHash) throw new Error('Invalid file name format');
|
||||
|
||||
const packageId = [...currentPath, ...parts].join(':');
|
||||
|
||||
await installApp(packageId, versionHash);
|
||||
await fetchInstalled();
|
||||
setShowCapApproval(false);
|
||||
await loadItems();
|
||||
} catch (error) {
|
||||
console.error('Installation failed:', error);
|
||||
setError(`Installation failed: ${error instanceof Error ? error.message : String(error)}`);
|
||||
} finally {
|
||||
setIsInstalling(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleRemoveDownload = async (item: DownloadItem) => {
|
||||
if (item.File) {
|
||||
try {
|
||||
const packageId = currentPath.join(':');
|
||||
const versionHash = item.File.name.replace('.zip', '');
|
||||
await removeDownload(packageId, versionHash);
|
||||
await loadItems();
|
||||
} catch (error) {
|
||||
console.error('Failed to remove download:', error);
|
||||
setError(`Failed to remove download: ${error instanceof Error ? error.message : String(error)}`);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const isAppInstalled = (name: string): boolean => {
|
||||
const packageName = name.replace('.zip', '');
|
||||
return Object.values(installed).some(app => app.package_id.package_name === packageName);
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="downloads-page">
|
||||
<h2>My Apps</h2>
|
||||
|
||||
{/* Installed Apps Section */}
|
||||
<div className="file-explorer">
|
||||
<h3>Installed Apps</h3>
|
||||
<table className="downloads-table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Package ID</th>
|
||||
<th>Actions</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{Object.values(installed).map((app) => {
|
||||
const packageId = `${app.package_id.package_name}:${app.package_id.publisher_node}`;
|
||||
const isCore = CORE_PACKAGES.includes(packageId);
|
||||
return (
|
||||
<tr key={packageId}>
|
||||
<td>{packageId}</td>
|
||||
<td>
|
||||
{isCore ? (
|
||||
<span className="core-package">Core Package</span>
|
||||
) : (
|
||||
<button
|
||||
onClick={() => initiateUninstall(app)}
|
||||
disabled={isUninstalling}
|
||||
>
|
||||
{isUninstalling ? <FaSpinner className="fa-spin" /> : <FaTrash />}
|
||||
Uninstall
|
||||
</button>
|
||||
)}
|
||||
</td>
|
||||
</tr>
|
||||
);
|
||||
})}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
{/* Downloads Section */}
|
||||
<div className="file-explorer">
|
||||
<h3>Downloads</h3>
|
||||
<div className="path-navigation">
|
||||
{currentPath.length > 0 && (
|
||||
<button onClick={navigateUp} className="navigate-up">
|
||||
<FaChevronLeft /> Back
|
||||
</button>
|
||||
)}
|
||||
<span className="current-path">/{currentPath.join('/')}</span>
|
||||
</div>
|
||||
<table className="downloads-table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Name</th>
|
||||
<th>Type</th>
|
||||
<th>Size</th>
|
||||
<th>Mirroring</th>
|
||||
<th>Actions</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{items.map((item, index) => {
|
||||
const isFile = !!item.File;
|
||||
const name = isFile ? item.File!.name : item.Dir!.name;
|
||||
const isInstalled = isFile && isAppInstalled(name);
|
||||
return (
|
||||
<tr key={index} onClick={() => navigateToItem(item)} className={isFile ? 'file' : 'directory'}>
|
||||
<td>
|
||||
{isFile ? <FaFile /> : <FaFolder />} {name}
|
||||
</td>
|
||||
<td>{isFile ? 'File' : 'Directory'}</td>
|
||||
<td>{isFile ? `${(item.File!.size / 1024).toFixed(2)} KB` : '-'}</td>
|
||||
<td>{!isFile && (item.Dir!.mirroring ? 'Yes' : 'No')}</td>
|
||||
<td>
|
||||
{!isFile && (
|
||||
<button onClick={(e) => { e.stopPropagation(); toggleMirroring(item); }}>
|
||||
<FaSync /> {item.Dir!.mirroring ? 'Stop' : 'Start'} Mirroring
|
||||
</button>
|
||||
)}
|
||||
{isFile && !isInstalled && (
|
||||
<>
|
||||
<button onClick={(e) => { e.stopPropagation(); handleInstall(item); }}>
|
||||
<FaRocket /> Install
|
||||
</button>
|
||||
<button onClick={(e) => { e.stopPropagation(); handleRemoveDownload(item); }}>
|
||||
<FaTrash /> Delete
|
||||
</button>
|
||||
</>
|
||||
)}
|
||||
{isFile && isInstalled && (
|
||||
<FaCheck className="installed" />
|
||||
)}
|
||||
</td>
|
||||
</tr>
|
||||
);
|
||||
})}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
{error && (
|
||||
<div className="error-message">
|
||||
{error}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Uninstall Confirmation Modal */}
|
||||
{showUninstallConfirm && appToUninstall && (
|
||||
<div className="cap-approval-popup">
|
||||
<div className="cap-approval-content">
|
||||
<h3>Confirm Uninstall</h3>
|
||||
<div className="warning-message">
|
||||
Are you sure you want to uninstall this app?
|
||||
</div>
|
||||
<div className="package-info">
|
||||
<strong>Package ID:</strong> {`${appToUninstall.package_id.package_name}:${appToUninstall.package_id.publisher_node}`}
|
||||
</div>
|
||||
{appToUninstall.metadata?.name && (
|
||||
<div className="package-info">
|
||||
<strong>Name:</strong> {appToUninstall.metadata.name}
|
||||
</div>
|
||||
)}
|
||||
<div className="approval-buttons">
|
||||
<button
|
||||
onClick={() => {
|
||||
setShowUninstallConfirm(false);
|
||||
setAppToUninstall(null);
|
||||
}}
|
||||
>
|
||||
Cancel
|
||||
</button>
|
||||
<button
|
||||
onClick={handleUninstall}
|
||||
disabled={isUninstalling}
|
||||
className="danger"
|
||||
>
|
||||
{isUninstalling ? <FaSpinner className="fa-spin" /> : 'Confirm Uninstall'}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
|
||||
|
||||
{showCapApproval && manifest && (
|
||||
<div className="cap-approval-popup">
|
||||
<div className="cap-approval-content">
|
||||
<h3>Approve Capabilities</h3>
|
||||
<pre className="json-display">
|
||||
{JSON.stringify(manifest[0]?.request_capabilities || [], null, 2)}
|
||||
</pre>
|
||||
<div className="approval-buttons">
|
||||
<button onClick={() => setShowCapApproval(false)}>Cancel</button>
|
||||
<button onClick={confirmInstall} disabled={isInstalling}>
|
||||
{isInstalling ? <FaSpinner className="fa-spin" /> : 'Approve and Install'}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
@ -3,15 +3,41 @@ import { FaFolder, FaFile, FaChevronLeft, FaSync, FaRocket, FaSpinner, FaCheck,
import useAppsStore from "../store";
import { DownloadItem, PackageManifest, PackageState } from "../types/Apps";

export default function MyDownloadsPage() {
  const { fetchDownloads, fetchDownloadsForApp, startMirroring, stopMirroring, installApp, removeDownload, fetchInstalled, installed } = useAppsStore();
// Core packages that cannot be uninstalled
const CORE_PACKAGES = [
  "app_store:sys",
  "contacts:sys",
  "kino_updates:sys",
  "terminal:sys",
  "chess:sys",
  "kns_indexer:sys",
  "settings:sys",
  "homepage:sys"
];

export default function MyAppsPage() {
  const {
    fetchDownloads,
    fetchDownloadsForApp,
    startMirroring,
    stopMirroring,
    installApp,
    removeDownload,
    fetchInstalled,
    installed,
    uninstallApp
  } = useAppsStore();

  const [currentPath, setCurrentPath] = useState<string[]>([]);
  const [items, setItems] = useState<DownloadItem[]>([]);
  const [isInstalling, setIsInstalling] = useState(false);
  const [isUninstalling, setIsUninstalling] = useState(false);
  const [error, setError] = useState<string | null>(null);
  const [showCapApproval, setShowCapApproval] = useState(false);
  const [manifest, setManifest] = useState<PackageManifest | null>(null);
  const [selectedItem, setSelectedItem] = useState<DownloadItem | null>(null);
  const [showUninstallConfirm, setShowUninstallConfirm] = useState(false);
  const [appToUninstall, setAppToUninstall] = useState<any>(null);

  useEffect(() => {
    loadItems();
@ -33,6 +59,35 @@ export default function MyDownloadsPage() {
|
||||
}
|
||||
};
|
||||
|
||||
const initiateUninstall = (app: any) => {
|
||||
const packageId = `${app.package_id.package_name}:${app.package_id.publisher_node}`;
|
||||
if (CORE_PACKAGES.includes(packageId)) {
|
||||
setError("Cannot uninstall core system packages");
|
||||
return;
|
||||
}
|
||||
setAppToUninstall(app);
|
||||
setShowUninstallConfirm(true);
|
||||
};
|
||||
|
||||
const handleUninstall = async () => {
|
||||
if (!appToUninstall) return;
|
||||
setIsUninstalling(true);
|
||||
const packageId = `${appToUninstall.package_id.package_name}:${appToUninstall.package_id.publisher_node}`;
|
||||
try {
|
||||
await uninstallApp(packageId);
|
||||
await fetchInstalled();
|
||||
await loadItems();
|
||||
setShowUninstallConfirm(false);
|
||||
setAppToUninstall(null);
|
||||
} catch (error) {
|
||||
console.error('Uninstallation failed:', error);
|
||||
setError(`Uninstallation failed: ${error instanceof Error ? error.message : String(error)}`);
|
||||
} finally {
|
||||
setIsUninstalling(false);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
const navigateToItem = (item: DownloadItem) => {
|
||||
if (item.Dir) {
|
||||
setCurrentPath([...currentPath, item.Dir.name]);
|
||||
@ -85,7 +140,6 @@ export default function MyDownloadsPage() {
|
||||
|
||||
if (!versionHash) throw new Error('Invalid file name format');
|
||||
|
||||
// Construct packageId by combining currentPath and remaining parts of the filename
|
||||
const packageId = [...currentPath, ...parts].join(':');
|
||||
|
||||
await installApp(packageId, versionHash);
|
||||
@ -121,8 +175,48 @@ export default function MyDownloadsPage() {
|
||||
|
||||
return (
|
||||
<div className="downloads-page">
|
||||
<h2>Downloads</h2>
|
||||
<h2>My Apps</h2>
|
||||
|
||||
{/* Installed Apps Section */}
|
||||
<div className="file-explorer">
|
||||
<h3>Installed Apps</h3>
|
||||
<table className="downloads-table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Package ID</th>
|
||||
<th>Actions</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{Object.values(installed).map((app) => {
|
||||
const packageId = `${app.package_id.package_name}:${app.package_id.publisher_node}`;
|
||||
const isCore = CORE_PACKAGES.includes(packageId);
|
||||
return (
|
||||
<tr key={packageId}>
|
||||
<td>{packageId}</td>
|
||||
<td>
|
||||
{isCore ? (
|
||||
<span className="core-package">Core Package</span>
|
||||
) : (
|
||||
<button
|
||||
onClick={() => initiateUninstall(app)}
|
||||
disabled={isUninstalling}
|
||||
>
|
||||
{isUninstalling ? <FaSpinner className="fa-spin" /> : <FaTrash />}
|
||||
Uninstall
|
||||
</button>
|
||||
)}
|
||||
</td>
|
||||
</tr>
|
||||
);
|
||||
})}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
{/* Downloads Section */}
|
||||
<div className="file-explorer">
|
||||
<h3>Downloads</h3>
|
||||
<div className="path-navigation">
|
||||
{currentPath.length > 0 && (
|
||||
<button onClick={navigateUp} className="navigate-up">
|
||||
@ -172,7 +266,8 @@ export default function MyDownloadsPage() {
|
||||
)}
|
||||
{isFile && isInstalled && (
|
||||
<FaCheck className="installed" />
|
||||
)} </td>
|
||||
)}
|
||||
</td>
|
||||
</tr>
|
||||
);
|
||||
})}
|
||||
@ -186,6 +281,45 @@ export default function MyDownloadsPage() {
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Uninstall Confirmation Modal */}
|
||||
{showUninstallConfirm && appToUninstall && (
|
||||
<div className="cap-approval-popup">
|
||||
<div className="cap-approval-content">
|
||||
<h3>Confirm Uninstall</h3>
|
||||
<div className="warning-message">
|
||||
Are you sure you want to uninstall this app?
|
||||
</div>
|
||||
<div className="package-info">
|
||||
<strong>Package ID:</strong> {`${appToUninstall.package_id.package_name}:${appToUninstall.package_id.publisher_node}`}
|
||||
</div>
|
||||
{appToUninstall.metadata?.name && (
|
||||
<div className="package-info">
|
||||
<strong>Name:</strong> {appToUninstall.metadata.name}
|
||||
</div>
|
||||
)}
|
||||
<div className="approval-buttons">
|
||||
<button
|
||||
onClick={() => {
|
||||
setShowUninstallConfirm(false);
|
||||
setAppToUninstall(null);
|
||||
}}
|
||||
>
|
||||
Cancel
|
||||
</button>
|
||||
<button
|
||||
onClick={handleUninstall}
|
||||
disabled={isUninstalling}
|
||||
className="danger"
|
||||
>
|
||||
{isUninstalling ? <FaSpinner className="fa-spin" /> : 'Confirm Uninstall'}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
|
||||
|
||||
{showCapApproval && manifest && (
|
||||
<div className="cap-approval-popup">
|
||||
<div className="cap-approval-content">
|
||||
|
@ -1,6 +1,6 @@
|
||||
import { create } from 'zustand'
|
||||
import { persist } from 'zustand/middleware'
|
||||
import { PackageState, AppListing, MirrorCheckFile, PackageManifest, DownloadItem, HomepageApp } from '../types/Apps'
|
||||
import { PackageState, AppListing, MirrorCheckFile, DownloadItem, HomepageApp, ManifestResponse, Notification } from '../types/Apps'
|
||||
import { HTTP_STATUS } from '../constants/http'
|
||||
import KinodeClientApi from "@kinode/client-api"
|
||||
import { WEBSOCKET_URL } from '../utils/ws'
|
||||
@ -13,6 +13,7 @@ interface AppsStore {
|
||||
downloads: Record<string, DownloadItem[]>
|
||||
ourApps: AppListing[]
|
||||
ws: KinodeClientApi
|
||||
notifications: Notification[]
|
||||
homepageApps: HomepageApp[]
|
||||
activeDownloads: Record<string, { downloaded: number, total: number }>
|
||||
|
||||
@ -29,11 +30,15 @@ interface AppsStore {
|
||||
fetchHomepageApps: () => Promise<void>
|
||||
getLaunchUrl: (id: string) => string | null
|
||||
|
||||
addNotification: (notification: Notification) => void;
|
||||
removeNotification: (id: string) => void;
|
||||
clearNotifications: () => void;
|
||||
|
||||
installApp: (id: string, version_hash: string) => Promise<void>
|
||||
uninstallApp: (id: string) => Promise<void>
|
||||
downloadApp: (id: string, version_hash: string, downloadFrom: string) => Promise<void>
|
||||
removeDownload: (packageId: string, versionHash: string) => Promise<void>
|
||||
getCaps: (id: string) => Promise<PackageManifest | null>
|
||||
getManifest: (id: string, version_hash: string) => Promise<ManifestResponse | null>
|
||||
approveCaps: (id: string) => Promise<void>
|
||||
startMirroring: (id: string) => Promise<void>
|
||||
stopMirroring: (id: string) => Promise<void>
|
||||
@ -52,6 +57,7 @@ const useAppsStore = create<AppsStore>()((set, get) => ({
|
||||
ourApps: [],
|
||||
activeDownloads: {},
|
||||
homepageApps: [],
|
||||
notifications: [],
|
||||
|
||||
|
||||
fetchData: async (id: string) => {
|
||||
@ -282,14 +288,14 @@ const useAppsStore = create<AppsStore>()((set, get) => ({
|
||||
}
|
||||
},
|
||||
|
||||
getCaps: async (id: string) => {
|
||||
getManifest: async (id: string, version_hash: string) => {
|
||||
try {
|
||||
const res = await fetch(`${BASE_URL}/apps/${id}/caps`);
|
||||
const res = await fetch(`${BASE_URL}/manifest?id=${id}&version_hash=${version_hash}`);
|
||||
if (res.status === HTTP_STATUS.OK) {
|
||||
return await res.json() as PackageManifest;
|
||||
return await res.json() as ManifestResponse;
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Error getting caps:", error);
|
||||
console.error("Error getting manifest:", error);
|
||||
}
|
||||
return null;
|
||||
},
|
||||
@ -355,6 +361,18 @@ const useAppsStore = create<AppsStore>()((set, get) => ({
|
||||
}));
|
||||
},
|
||||
|
||||
|
||||
addNotification: (notification) => set(state => ({
|
||||
notifications: [...state.notifications, notification]
|
||||
})),
|
||||
|
||||
removeNotification: (id) => set(state => ({
|
||||
notifications: state.notifications.filter(n => n.id !== id)
|
||||
})),
|
||||
|
||||
clearNotifications: () => set({ notifications: [] }),
|
||||
|
||||
|
||||
clearActiveDownload: (appId) => {
|
||||
set((state) => {
|
||||
const { [appId]: _, ...rest } = state.activeDownloads;
|
||||
@ -374,10 +392,48 @@ const useAppsStore = create<AppsStore>()((set, get) => ({
|
||||
const { package_id, version_hash, downloaded, total } = data.data;
|
||||
const appId = `${package_id.package_name}:${package_id.publisher_node}:${version_hash}`;
|
||||
get().setActiveDownload(appId, downloaded, total);
|
||||
|
||||
const existingNotification = get().notifications.find(
|
||||
n => n.id === `download-${appId}`
|
||||
);
|
||||
|
||||
if (existingNotification) {
|
||||
get().removeNotification(`download-${appId}`);
|
||||
}
|
||||
|
||||
get().addNotification({
|
||||
id: `download-${appId}`,
|
||||
type: 'download',
|
||||
message: `Downloading ${package_id.package_name}`,
|
||||
timestamp: Date.now(),
|
||||
metadata: {
|
||||
packageId: `${package_id.package_name}:${package_id.publisher_node}`,
|
||||
versionHash: version_hash,
|
||||
progress: Math.round((downloaded / total) * 100)
|
||||
}
|
||||
});
|
||||
} else if (data.kind === 'complete') {
|
||||
const { package_id, version_hash } = data.data;
|
||||
const { package_id, version_hash, error } = data.data;
|
||||
const appId = `${package_id.package_name}:${package_id.publisher_node}:${version_hash}`;
|
||||
get().clearActiveDownload(appId);
|
||||
get().removeNotification(`download-${appId}`);
|
||||
|
||||
if (error) {
|
||||
get().addNotification({
|
||||
id: `error-${appId}`,
|
||||
type: 'error',
|
||||
message: `Download failed for ${package_id.package_name}: ${error}`,
|
||||
timestamp: Date.now(),
|
||||
});
|
||||
} else {
|
||||
get().addNotification({
|
||||
id: `complete-${appId}`,
|
||||
type: 'success',
|
||||
message: `Download complete: ${package_id.package_name}`,
|
||||
timestamp: Date.now(),
|
||||
});
|
||||
}
|
||||
|
||||
get().fetchData(`${package_id.package_name}:${package_id.publisher_node}`);
|
||||
}
|
||||
} catch (error) {
|
||||
|
@ -1,3 +1,6 @@
import { ReactNode } from "react";
import { IconType } from "react-icons/lib";

export interface PackageId {
  package_name: string;
  publisher_node: string;
@ -59,9 +62,10 @@ export interface PackageState {
  our_version_hash: string;
  verified: boolean;
  caps_approved: boolean;
  pending_update_hash?: string;
}

export interface PackageManifest {
export interface PackageManifestEntry {
  process_name: string
  process_wasm_path: string
  on_exit: string
@ -71,6 +75,12 @@ export interface PackageManifest {
  public: boolean
}

export interface ManifestResponse {
  package_id: PackageId;
  version_hash: string;
  manifest: string;
}

export interface HomepageApp {
  id: string;
  process: string;
@ -83,3 +93,35 @@ export interface HomepageApp {
  order: number;
  favorite: boolean;
}

export type NotificationActionType = 'click' | 'modal' | 'popup' | 'redirect';

export type NotificationAction = {
  label: string;
  icon?: IconType;
  variant?: 'primary' | 'secondary' | 'danger';
  action: {
    type: NotificationActionType;
    onClick?: () => void;
    modalContent?: ReactNode | (() => ReactNode);
    popupContent?: ReactNode | (() => ReactNode);
    path?: string;
  };
};

export type Notification = {
  id: string;
  type: 'error' | 'success' | 'warning' | 'info' | 'download' | 'install' | 'update';
  message: string;
  timestamp: number;
  metadata?: {
    packageId?: string;
    versionHash?: string;
    progress?: number;
    error?: string;
  };
  actions?: NotificationAction[];
  renderContent?: (notification: Notification) => ReactNode;
  persistent?: boolean;
};
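For orientation, here is an illustrative example (not part of the diff; package name, publisher, and hash are hypothetical) of the `Notification` shape the updated store pushes from its websocket "progress" handler:

```ts
import { Notification } from "../types/Apps";

// Hypothetical values, mirroring what addNotification receives while a download is in flight.
const downloadNotification: Notification = {
  id: "download-chess:sys:abc123",
  type: "download",
  message: "Downloading chess",
  timestamp: Date.now(),
  metadata: {
    packageId: "chess:sys",
    versionHash: "abc123",
    progress: 42, // Math.round((downloaded / total) * 100)
  },
};
```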
@ -3,7 +3,7 @@ const BASE_URL = "/main:app-store:sys/";

if (window.our) window.our.process = BASE_URL?.replace("/", "");

export const PROXY_TARGET = `${(import.meta.env.VITE_NODE_URL || `http://localhost:8080`)}${BASE_URL}`;
export const PROXY_TARGET = `${(import.meta.env.VITE_NODE_URL || `http://localhost:8080`).replace(/\/+$/, '')}${BASE_URL}`;

// This env also has BASE_URL which should match the process + package name
export const WEBSOCKET_URL = import.meta.env.DEV
@ -1,4 +1,4 @@
import { defineConfig } from 'vite'
import { defineConfig, ViteDevServer } from 'vite'
import { nodePolyfills } from 'vite-plugin-node-polyfills'
import react from '@vitejs/plugin-react'

@ -17,10 +17,15 @@ The format is "/" + "process_name:package_name:publisher_node"
const BASE_URL = `/main:app-store:sys`;

// This is the proxy URL, it must match the node you are developing against
const PROXY_URL = (process.env.VITE_NODE_URL || 'http://127.0.0.1:8080').replace('localhost', '127.0.0.1');
const PROXY_URL = (process.env.VITE_NODE_URL || 'http://localhost:8080').replace(/\/$/, '');

const DEV_SERVER_PORT = 3000; // Hardcoded port for the dev server...

console.log('process.env.VITE_NODE_URL', process.env.VITE_NODE_URL, PROXY_URL);

const openUrl = `${PROXY_URL.replace(/:\d+$/, '')}:${DEV_SERVER_PORT}${BASE_URL}`;
console.log('Server will run at:', openUrl);

export default defineConfig({
  plugins: [
    nodePolyfills({
@ -37,7 +42,8 @@ export default defineConfig({
    }
  },
  server: {
    open: true,
    open: openUrl,
    port: DEV_SERVER_PORT,
    proxy: {
      [`^${BASE_URL}/our.js`]: {
        target: PROXY_URL,
@ -8,7 +8,7 @@ simulation-mode = []
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1.0"
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
|
||||
process_macros = { git = "https://github.com/kinode-dao/process_macros", rev = "626e501" }
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
|
4
kinode/packages/chess/Cargo.lock
generated
@ -1559,8 +1559,8 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kinode_process_lib"
|
||||
version = "0.9.4"
|
||||
source = "git+https://github.com/kinode-dao/process_lib?rev=778457a#778457ae52c934fd17e0e846127a9472a8e03170"
|
||||
version = "0.10.0"
|
||||
source = "git+https://github.com/kinode-dao/process_lib?rev=1fe8612#1fe8612a24f6806270ac9b02c7bd4f464a1422ed"
|
||||
dependencies = [
|
||||
"alloy",
|
||||
"alloy-primitives 0.7.7",
|
||||
|
@ -9,7 +9,7 @@ simulation-mode = []
|
||||
[dependencies]
|
||||
anyhow = "1.0"
|
||||
bincode = "1.3.3"
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
|
||||
pleco = "0.5"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
|
4
kinode/packages/contacts/Cargo.lock
generated
@ -1514,8 +1514,8 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kinode_process_lib"
|
||||
version = "0.9.4"
|
||||
source = "git+https://github.com/kinode-dao/process_lib?rev=778457a#778457ae52c934fd17e0e846127a9472a8e03170"
|
||||
version = "0.10.0"
|
||||
source = "git+https://github.com/kinode-dao/process_lib?rev=1fe8612#1fe8612a24f6806270ac9b02c7bd4f464a1422ed"
|
||||
dependencies = [
|
||||
"alloy",
|
||||
"alloy-primitives 0.7.7",
|
||||
|
@ -7,7 +7,7 @@ edition = "2021"
|
||||
simulation-mode = []
|
||||
|
||||
[dependencies]
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
|
||||
process_macros = { git = "https://github.com/kinode-dao/process_macros", rev = "626e501" }
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
|
@ -6,7 +6,7 @@ publish = false
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1.0"
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
|
||||
process_macros = { git = "https://github.com/kinode-dao/process_macros", rev = "626e501" }
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
|
@ -77,12 +77,7 @@ function populate_contacts(contacts) {
|
||||
|
||||
document.getElementById('back-button').addEventListener('click', () => {
|
||||
// set page to `/` while also removing the subdomain
|
||||
const url = new URL(window.location.href);
|
||||
if (url.hostname.split('.')[0] === 'contacts-sys') {
|
||||
url.hostname = url.hostname.split('.').slice(1).join('.');
|
||||
}
|
||||
url.pathname = '/';
|
||||
window.location.href = url.toString();
|
||||
window.location.href = window.location.origin.replace('//contacts-sys.', '//') + '/'
|
||||
});
|
||||
|
||||
document.getElementById('add-contact').addEventListener('submit', (e) => {
|
||||
|
4
kinode/packages/homepage/Cargo.lock
generated
@ -1503,8 +1503,8 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kinode_process_lib"
|
||||
version = "0.9.4"
|
||||
source = "git+https://github.com/kinode-dao/process_lib?rev=778457a#778457ae52c934fd17e0e846127a9472a8e03170"
|
||||
version = "0.10.0"
|
||||
source = "git+https://github.com/kinode-dao/process_lib?rev=1fe8612#1fe8612a24f6806270ac9b02c7bd4f464a1422ed"
|
||||
dependencies = [
|
||||
"alloy",
|
||||
"alloy-primitives 0.7.7",
|
||||
|
@ -9,7 +9,7 @@ simulation-mode = []
|
||||
[dependencies]
|
||||
anyhow = "1.0"
|
||||
bincode = "1.3.3"
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
wit-bindgen = "0.24.0"
|
||||
|
4
kinode/packages/kino-updates/Cargo.lock
generated
@ -1515,8 +1515,8 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kinode_process_lib"
|
||||
version = "0.9.4"
|
||||
source = "git+https://github.com/kinode-dao/process_lib?rev=778457a#778457ae52c934fd17e0e846127a9472a8e03170"
|
||||
version = "0.10.0"
|
||||
source = "git+https://github.com/kinode-dao/process_lib?rev=1fe8612#1fe8612a24f6806270ac9b02c7bd4f464a1422ed"
|
||||
dependencies = [
|
||||
"alloy",
|
||||
"alloy-primitives 0.7.7",
|
||||
|
@ -9,7 +9,7 @@ simulation-mode = []
|
||||
[dependencies]
|
||||
anyhow = "1.0"
|
||||
bincode = "1.3.3"
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
url = "2.5.0"
|
||||
|
@ -1,4 +1,4 @@
|
||||
use kinode_process_lib::{call_init, http, timer, Address, Request};
|
||||
use kinode_process_lib::{call_init, http, println, timer, Address, Request};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
wit_bindgen::generate!({
|
||||
|
@ -7,7 +7,7 @@ edition = "2021"
|
||||
simulation-mode = []
|
||||
|
||||
[dependencies]
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
url = "2.5.0"
|
||||
|
4
kinode/packages/kns-indexer/Cargo.lock
generated
@ -1501,8 +1501,8 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kinode_process_lib"
|
||||
version = "0.9.4"
|
||||
source = "git+https://github.com/kinode-dao/process_lib?rev=778457a#778457ae52c934fd17e0e846127a9472a8e03170"
|
||||
version = "0.10.0"
|
||||
source = "git+https://github.com/kinode-dao/process_lib?rev=1fe8612#1fe8612a24f6806270ac9b02c7bd4f464a1422ed"
|
||||
dependencies = [
|
||||
"alloy",
|
||||
"alloy-primitives 0.7.7",
|
||||
|
@ -7,7 +7,7 @@ edition = "2021"
|
||||
simulation-mode = []
|
||||
|
||||
[dependencies]
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
wit-bindgen = "0.24.0"
|
||||
|
@ -11,7 +11,7 @@ anyhow = "1.0"
|
||||
alloy-primitives = "0.7.0"
|
||||
alloy-sol-types = "0.7.0"
|
||||
hex = "0.4.3"
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
|
||||
rmp-serde = "1.1.2"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
|
@ -23,7 +23,7 @@ wit_bindgen::generate!({
|
||||
#[cfg(not(feature = "simulation-mode"))]
|
||||
const KIMAP_ADDRESS: &'static str = kimap::KIMAP_ADDRESS; // optimism
|
||||
#[cfg(feature = "simulation-mode")]
|
||||
const KIMAP_ADDRESS: &'static str = "0xEce71a05B36CA55B895427cD9a440eEF7Cf3669D"; // local
|
||||
const KIMAP_ADDRESS: &'static str = "0x9CE8cCD2932DC727c70f9ae4f8C2b68E6Abed58C"; // local
|
||||
|
||||
#[cfg(not(feature = "simulation-mode"))]
|
||||
const CHAIN_ID: u64 = kimap::KIMAP_CHAIN_ID; // optimism
|
||||
|
@ -7,7 +7,7 @@ edition = "2021"
|
||||
simulation-mode = []
|
||||
|
||||
[dependencies]
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
wit-bindgen = "0.24.0"
|
||||
|
4
kinode/packages/settings/Cargo.lock
generated
@ -1491,8 +1491,8 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kinode_process_lib"
|
||||
version = "0.9.4"
|
||||
source = "git+https://github.com/kinode-dao/process_lib?rev=778457a#778457ae52c934fd17e0e846127a9472a8e03170"
|
||||
version = "0.10.0"
|
||||
source = "git+https://github.com/kinode-dao/process_lib?rev=1fe8612#1fe8612a24f6806270ac9b02c7bd4f464a1422ed"
|
||||
dependencies = [
|
||||
"alloy",
|
||||
"alloy-primitives 0.7.7",
|
||||
|
@ -10,7 +10,7 @@ simulation-mode = []
|
||||
anyhow = "1.0"
|
||||
base64 = "0.22.0"
|
||||
bincode = "1.3.3"
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
|
||||
rmp-serde = "1.2.0"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
|
4
kinode/packages/terminal/Cargo.lock
generated
@ -1659,8 +1659,8 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kinode_process_lib"
|
||||
version = "0.9.4"
|
||||
source = "git+https://github.com/kinode-dao/process_lib?rev=778457a#778457ae52c934fd17e0e846127a9472a8e03170"
|
||||
version = "0.10.0"
|
||||
source = "git+https://github.com/kinode-dao/process_lib?rev=1fe8612#1fe8612a24f6806270ac9b02c7bd4f464a1422ed"
|
||||
dependencies = [
|
||||
"alloy",
|
||||
"alloy-primitives 0.7.7",
|
||||
|
@ -8,7 +8,7 @@ simulation-mode = []
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1.0"
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
wit-bindgen = "0.24.0"
|
||||
|
@ -8,7 +8,7 @@ simulation-mode = []
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1.0"
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
wit-bindgen = "0.24.0"
|
||||
|
@ -7,7 +7,7 @@ edition = "2021"
|
||||
simulation-mode = []
|
||||
|
||||
[dependencies]
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
|
||||
wit-bindgen = "0.24.0"
|
||||
|
||||
[lib]
|
||||
|
@ -7,7 +7,7 @@ edition = "2021"
|
||||
simulation-mode = []
|
||||
|
||||
[dependencies]
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
|
||||
wit-bindgen = "0.24.0"
|
||||
|
||||
[lib]
|
||||
|
@ -7,7 +7,7 @@ edition = "2021"
|
||||
simulation-mode = []
|
||||
|
||||
[dependencies]
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
wit-bindgen = "0.24.0"
|
||||
|
@ -8,7 +8,7 @@ simulation-mode = []
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1.0"
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
|
||||
rmp-serde = "1.1.2"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
|
@ -8,7 +8,7 @@ simulation-mode = []
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1.0"
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
wit-bindgen = "0.24.0"
|
||||
|
@ -9,7 +9,7 @@ simulation-mode = []
|
||||
[dependencies]
|
||||
anyhow = "1.0"
|
||||
clap = "4.4"
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
|
||||
regex = "1.10.3"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
|
@ -7,7 +7,7 @@ edition = "2021"
|
||||
simulation-mode = []
|
||||
|
||||
[dependencies]
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
|
||||
rmp-serde = "1.1.2"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
wit-bindgen = "0.24.0"
|
||||
|
@ -7,7 +7,7 @@ edition = "2021"
|
||||
simulation-mode = []
|
||||
|
||||
[dependencies]
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
|
||||
rmp-serde = "1.1.2"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
wit-bindgen = "0.24.0"
|
||||
|
@ -7,7 +7,7 @@ edition = "2021"
|
||||
simulation-mode = []
|
||||
|
||||
[dependencies]
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
|
||||
rmp-serde = "1.1.2"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
wit-bindgen = "0.24.0"
|
||||
|
@ -9,7 +9,7 @@ simulation-mode = []
|
||||
[dependencies]
|
||||
anyhow = "1.0"
|
||||
bincode = "1.3.3"
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
|
||||
rand = "0.8"
|
||||
regex = "1.10.3"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
|
@ -9,7 +9,7 @@ simulation-mode = []
|
||||
[dependencies]
|
||||
anyhow = "1.0"
|
||||
clap = "4.4"
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
wit-bindgen = "0.24.0"
|
||||
|
4
kinode/packages/tester/Cargo.lock
generated
@ -1491,8 +1491,8 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kinode_process_lib"
|
||||
version = "0.9.4"
|
||||
source = "git+https://github.com/kinode-dao/process_lib?rev=778457a#778457ae52c934fd17e0e846127a9472a8e03170"
|
||||
version = "0.10.0"
|
||||
source = "git+https://github.com/kinode-dao/process_lib?rev=1fe8612#1fe8612a24f6806270ac9b02c7bd4f464a1422ed"
|
||||
dependencies = [
|
||||
"alloy",
|
||||
"alloy-primitives 0.7.7",
|
||||
|
@ -9,7 +9,7 @@ simulation-mode = []
|
||||
[dependencies]
|
||||
anyhow = "1.0"
|
||||
bincode = "1.3.3"
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "778457a" }
|
||||
kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib", rev = "1fe8612" }
|
||||
process_macros = { git = "https://github.com/kinode-dao/process_macros", rev = "626e501" }
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
|
@ -13,15 +13,15 @@ use std::str::FromStr;
use crate::{keygen, sol::*, KIMAP_ADDRESS, MULTICALL_ADDRESS};

// TODO move these into contracts registry, doublecheck optimism deployments
const FAKE_DOTDEV_TBA: &str = "0x1a5447E634aa056Fa302E48630Da8425EC15A53A";
const FAKE_DOTOS_TBA: &str = "0xF5FaB379Eb87599d7B5BaBeDDEFe6EfDEC6164b0";
const _FAKE_ZEROTH_TBA: &str = "0x02dd7FB5ca377b1a6E2960EB139aF390a24D28FA";
const FAKE_DOTDEV_TBA: &str = "0x27e913BF6dcd08E9E68530812B277224Be07890B";
const FAKE_DOTOS_TBA: &str = "0xC026fE4950c12AdACF284689d900AcC74987c555";
const _FAKE_ZEROTH_TBA: &str = "0x33b687295Cb095d9d962BA83732c67B96dffC8eA";

const KINO_ACCOUNT_IMPL: &str = "0x9fE46736679d2D9a65F0992F2272dE9f3c7fa6e0";
const KINO_ACCOUNT_IMPL: &str = "0x00ee0e0d00F01f6FF3aCcBA2986E07f99181b9c2";

const MULTICALL: &str = "0xcA11bde05977b3631167028862bE2a173976CA11";

const KIMAP: &str = "0xEce71a05B36CA55B895427cD9a440eEF7Cf3669D";
const KIMAP: &str = "0x9CE8cCD2932DC727c70f9ae4f8C2b68E6Abed58C";

/// Attempts to connect to a local anvil fakechain,
/// registering a name with its KiMap contract.
@ -153,8 +153,9 @@
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/crypto-js/4.2.0/crypto-js.min.js"></script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/argon2-browser/1.18.0/argon2-bundled.min.js"
|
||||
integrity="sha512-Alrh8vbmKDc5xiq7I/y8LTDwy9nw1nT9S/yR73HMMoWrpX4S1kizNPdWM896c/CDIGILNwAiaih627A94kRhYQ=="
|
||||
crossorigin="anonymous" referrerpolicy="no-referrer"></script>
|
||||
<script>
|
||||
let isInitialized = false;
|
||||
|
||||
@ -184,28 +185,37 @@
|
||||
document.getElementById("login-form").style.display = "none";
|
||||
document.getElementById("loading").style.display = "flex";
|
||||
|
||||
// sha256 hash password using crypto-js
|
||||
const hashHex = '0x' + CryptoJS.SHA256(password).toString(CryptoJS.enc.Hex);
|
||||
argon2.hash({
|
||||
pass: password,
|
||||
salt: '${node}',
|
||||
hashLen: 32,
|
||||
time: 2,
|
||||
mem: 19456,
|
||||
type: argon2.ArgonType.Argon2id
|
||||
}).then(async h => {
|
||||
const hashed_password_hex = `0x${h.hashHex}`;
|
||||
|
||||
const result = await fetch("/login", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({
|
||||
password_hash: hashHex,
|
||||
subdomain: isSecureSubdomain ? firstPathItem : '',
|
||||
}),
|
||||
});
|
||||
const result = await fetch("/login", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({
|
||||
password_hash: hashed_password_hex,
|
||||
subdomain: isSecureSubdomain ? firstPathItem : '',
|
||||
}),
|
||||
});
|
||||
|
||||
if (result.status == 200) {
|
||||
// reload page
|
||||
window.location.reload();
|
||||
} else {
|
||||
if (result.status == 200) {
|
||||
window.location.reload();
|
||||
} else {
|
||||
throw new Error("Login failed");
|
||||
}
|
||||
}).catch(err => {
|
||||
document.getElementById("login-form").style.display = "flex";
|
||||
document.getElementById("loading").style.display = "none";
|
||||
document.getElementById("password").value = "";
|
||||
document.getElementById("password-err").style.display = "flex";
|
||||
document.getElementById("password").focus();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function generateSecureSubdomain(processString) {
|
||||
@ -234,4 +244,4 @@
|
||||
</script>
|
||||
</body>
|
||||
|
||||
</html>
|
||||
</html>
|
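For orientation between the two files, here is a minimal TypeScript sketch of the login flow this commit introduces: the browser derives an Argon2id hash of the password (salted with the node name, t=2, m=19456, 32-byte output, as in `login.html` above) and POSTs it to `/login`. The `login` helper name and the `declare` shim for the argon2-browser global are illustrative assumptions, and the optional `?redirect=` behaviour follows the `login_handler` doc comment in the server diff below.

```ts
// Shim for the global exposed by the bundled argon2-browser script (assumed shape).
declare const argon2: {
  hash(opts: {
    pass: string;
    salt: string;
    hashLen: number;
    time: number;
    mem: number;
    type: number;
  }): Promise<{ hashHex: string }>;
  ArgonType: { Argon2id: number };
};

// Hypothetical helper; the in-tree login page does this inline in a <script> block.
async function login(node: string, password: string, redirect?: string): Promise<Response> {
  // Same parameters as login.html: Argon2id, salt = node name, 32-byte hash, t=2, m=19456 KiB.
  const { hashHex } = await argon2.hash({
    pass: password,
    salt: node,
    hashLen: 32,
    time: 2,
    mem: 19456,
    type: argon2.ArgonType.Argon2id,
  });

  // POST the 0x-prefixed hash. On success the server sets the kinode-auth cookie;
  // if ?redirect= is present it answers 303 with a Location header instead of the keyfile.
  const url = redirect ? `/login?redirect=${encodeURIComponent(redirect)}` : "/login";
  return fetch(url, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ password_hash: `0x${hashHex}`, subdomain: "" }),
  });
}
```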
@ -15,12 +15,13 @@ use lib::types::core::{
|
||||
};
|
||||
use route_recognizer::Router;
|
||||
use sha2::{Digest, Sha256};
|
||||
use std::collections::HashMap;
|
||||
use std::net::SocketAddr;
|
||||
use std::sync::Arc;
|
||||
use std::{collections::HashMap, net::SocketAddr, sync::Arc};
|
||||
use tokio::sync::RwLock;
|
||||
use warp::{
|
||||
http::{header::HeaderValue, StatusCode},
|
||||
http::{
|
||||
header::{HeaderValue, SET_COOKIE},
|
||||
StatusCode,
|
||||
},
|
||||
ws::{WebSocket, Ws},
|
||||
Filter, Reply,
|
||||
};
|
||||
@ -30,6 +31,8 @@ const HTTP_SELF_IMPOSED_TIMEOUT: u64 = 15;
|
||||
#[cfg(feature = "simulation-mode")]
|
||||
const HTTP_SELF_IMPOSED_TIMEOUT: u64 = 600;
|
||||
|
||||
const WS_SELF_IMPOSED_MAX_CONNECTIONS: u32 = 128;
|
||||
|
||||
const LOGIN_HTML: &str = include_str!("login.html");
|
||||
|
||||
/// mapping from a given HTTP request (assigned an ID) to the oneshot
|
||||
@ -60,7 +63,6 @@ struct BoundWsPath {
|
||||
pub app: Option<ProcessId>, // if None, path has been unbound
|
||||
pub secure_subdomain: Option<String>,
|
||||
pub authenticated: bool,
|
||||
pub encrypted: bool, // TODO use
|
||||
pub extension: bool,
|
||||
}
|
||||
|
||||
@ -293,6 +295,8 @@ async fn serve(
|
||||
warp::reply::with_status(warp::reply::html(cloned_login_html), StatusCode::OK)
|
||||
})
|
||||
.or(warp::post()
|
||||
.and(warp::filters::host::optional())
|
||||
.and(warp::query::<HashMap<String, String>>())
|
||||
.and(warp::body::content_length_limit(1024 * 16))
|
||||
.and(warp::body::json())
|
||||
.and(warp::any().map(move || cloned_our.clone()))
|
||||
@ -325,7 +329,12 @@ async fn serve(
|
||||
|
||||
/// handle non-GET requests on /login. if POST, validate password
|
||||
/// and return auth token, which will be stored in a cookie.
|
||||
///
|
||||
/// if redirect is provided in URL, such as ?redirect=/chess:chess:sys/,
|
||||
/// the browser will be redirected to that path after successful login.
|
||||
async fn login_handler(
|
||||
host: Option<warp::host::Authority>,
|
||||
query_params: HashMap<String, String>,
|
||||
info: LoginInfo,
|
||||
our: Arc<String>,
|
||||
encoded_keyfile: Arc<Vec<u8>>,
|
||||
@ -353,20 +362,63 @@ async fn login_handler(
|
||||
}
|
||||
};
|
||||
|
||||
let mut response = warp::reply::with_status(
|
||||
warp::reply::json(&base64_standard.encode(encoded_keyfile.to_vec())),
|
||||
StatusCode::OK,
|
||||
)
|
||||
.into_response();
|
||||
let mut response = if let Some(redirect) = query_params.get("redirect") {
|
||||
warp::reply::with_status(warp::reply(), StatusCode::SEE_OTHER).into_response()
|
||||
} else {
|
||||
warp::reply::with_status(
|
||||
warp::reply::json(&base64_standard.encode(encoded_keyfile.to_vec())),
|
||||
StatusCode::OK,
|
||||
)
|
||||
.into_response()
|
||||
};
|
||||
|
||||
let cookie = match info.subdomain.unwrap_or_default().as_str() {
|
||||
"" => format!("kinode-auth_{our}={token};"),
|
||||
subdomain => format!("kinode-auth_{our}@{subdomain}={token};"),
|
||||
subdomain => {
|
||||
// enforce that subdomain string only contains a-z, 0-9, ., :, and -
|
||||
let subdomain = subdomain
|
||||
.chars()
|
||||
.filter(|c| {
|
||||
c.is_ascii_alphanumeric() || c == &'-' || c == &':' || c == &'.'
|
||||
})
|
||||
.collect::<String>();
|
||||
format!("kinode-auth_{our}@{subdomain}={token};")
|
||||
}
|
||||
};
|
||||
|
||||
match HeaderValue::from_str(&cookie) {
|
||||
Ok(v) => {
|
||||
response.headers_mut().append("set-cookie", v);
|
||||
response.headers_mut().append(SET_COOKIE, v);
|
||||
response
|
||||
.headers_mut()
|
||||
.append("HttpOnly", HeaderValue::from_static("true"));
|
||||
response
|
||||
.headers_mut()
|
||||
.append("Secure", HeaderValue::from_static("true"));
|
||||
response
|
||||
.headers_mut()
|
||||
.append("SameSite", HeaderValue::from_static("Strict"));
|
||||
|
||||
if let Some(redirect) = query_params.get("redirect") {
|
||||
// get http/https from request headers
|
||||
let proto = match response.headers().get("X-Forwarded-Proto") {
|
||||
Some(proto) => proto.to_str().unwrap_or("http").to_string(),
|
||||
None => "http".to_string(),
|
||||
};
|
||||
|
||||
response.headers_mut().append(
|
||||
"Location",
|
||||
HeaderValue::from_str(&format!(
|
||||
"{proto}://{}{redirect}",
|
||||
host.unwrap()
|
||||
))
|
||||
.unwrap(),
|
||||
);
|
||||
response
|
||||
.headers_mut()
|
||||
.append("Content-Length", HeaderValue::from_str("0").unwrap());
|
||||
}
|
||||
|
||||
Ok(response)
|
||||
}
|
||||
Err(e) => Ok(warp::reply::with_status(
|
||||
@ -402,6 +454,19 @@ async fn ws_handler(
|
||||
.send(&print_tx)
|
||||
.await;
|
||||
|
||||
if ws_senders.len() >= WS_SELF_IMPOSED_MAX_CONNECTIONS as usize {
|
||||
Printout::new(
|
||||
0,
|
||||
format!(
|
||||
"http-server: too many open websockets ({})! rejecting incoming",
|
||||
ws_senders.len()
|
||||
),
|
||||
)
|
||||
.send(&print_tx)
|
||||
.await;
|
||||
return Err(warp::reject::reject());
|
||||
}
|
||||
|
||||
let serialized_headers = utils::serialize_headers(&headers);
|
||||
|
||||
let ws_path_bindings = ws_path_bindings.read().await;
|
||||
@ -428,12 +493,12 @@ async fn ws_handler(
|
||||
// parse out subdomain from host (there can only be one)
|
||||
let request_subdomain = host.host().split('.').next().unwrap_or("");
|
||||
if request_subdomain != subdomain
|
||||
|| !utils::auth_cookie_valid(&our, Some(&app), auth_token, &jwt_secret_bytes)
|
||||
|| !utils::auth_token_valid(&our, Some(&app), auth_token, &jwt_secret_bytes)
|
||||
{
|
||||
return Err(warp::reject::not_found());
|
||||
}
|
||||
} else {
|
||||
if !utils::auth_cookie_valid(&our, None, auth_token, &jwt_secret_bytes) {
|
||||
if !utils::auth_token_valid(&our, None, auth_token, &jwt_secret_bytes) {
|
||||
return Err(warp::reject::not_found());
|
||||
}
|
||||
}
|
||||
@ -467,7 +532,6 @@ async fn ws_handler(
|
||||
our.clone(),
|
||||
app,
|
||||
formatted_path,
|
||||
jwt_secret_bytes.clone(),
|
||||
ws_senders.clone(),
|
||||
send_to_loop.clone(),
|
||||
print_tx.clone(),
|
||||
@ -568,7 +632,7 @@ async fn http_handler(
|
||||
.body(vec![])
|
||||
.into_response());
|
||||
}
|
||||
if !utils::auth_cookie_valid(
|
||||
if !utils::auth_token_valid(
|
||||
&our,
|
||||
Some(&app),
|
||||
serialized_headers.get("cookie").unwrap_or(&"".to_string()),
|
||||
@ -581,7 +645,7 @@ async fn http_handler(
|
||||
.into_response());
|
||||
}
|
||||
} else {
|
||||
if !utils::auth_cookie_valid(
|
||||
if !utils::auth_token_valid(
|
||||
&our,
|
||||
None,
|
||||
serialized_headers.get("cookie").unwrap_or(&"".to_string()),
|
||||
@ -911,7 +975,6 @@ async fn maintain_websocket(
|
||||
our: Arc<String>,
|
||||
app: ProcessId,
|
||||
path: String,
|
||||
_jwt_secret_bytes: Arc<Vec<u8>>, // TODO use for encrypted channels
|
||||
ws_senders: WebSocketSenders,
|
||||
send_to_loop: MessageSender,
|
||||
print_tx: PrintSender,
|
||||
@ -1266,7 +1329,6 @@ async fn handle_app_message(
|
||||
HttpServerAction::WebSocketBind {
|
||||
path,
|
||||
authenticated,
|
||||
encrypted,
|
||||
extension,
|
||||
} => {
|
||||
if check_process_id_kimap_safe(&km.source.process).is_err() {
|
||||
@ -1293,16 +1355,11 @@ async fn handle_app_message(
|
||||
app: Some(km.source.process.clone()),
|
||||
secure_subdomain: None,
|
||||
authenticated,
|
||||
encrypted,
|
||||
extension,
|
||||
},
|
||||
);
|
||||
}
|
||||
HttpServerAction::WebSocketSecureBind {
|
||||
path,
|
||||
encrypted,
|
||||
extension,
|
||||
} => {
|
||||
HttpServerAction::WebSocketSecureBind { path, extension } => {
|
||||
if check_process_id_kimap_safe(&km.source.process).is_err() {
|
||||
let source = km.source.clone();
|
||||
send_action_response(
|
||||
@ -1328,7 +1385,6 @@ async fn handle_app_message(
|
||||
app: Some(km.source.process.clone()),
|
||||
secure_subdomain: Some(subdomain),
|
||||
authenticated: true,
|
||||
encrypted,
|
||||
extension,
|
||||
},
|
||||
);
|
||||
@ -1342,7 +1398,6 @@ async fn handle_app_message(
|
||||
app: None,
|
||||
secure_subdomain: None,
|
||||
authenticated: false,
|
||||
encrypted: false,
|
||||
extension: false,
|
||||
},
|
||||
);
|
||||
|
@ -20,27 +20,13 @@ pub struct RpcMessage {
    pub data: Option<String>,
}

/// Ingest an auth token given from client and return the node name or an error.
pub fn _verify_auth_token(auth_token: &str, jwt_secret: &[u8]) -> Result<String, jwt::Error> {
    let Ok(secret) = Hmac::<Sha256>::new_from_slice(jwt_secret) else {
        return Err(jwt::Error::Format);
    };

    let claims: Result<http_server::JwtClaims, jwt::Error> = auth_token.verify_with_key(&secret);

    match claims {
        Ok(data) => Ok(data.username),
        Err(err) => Err(err),
    }
}

pub fn auth_cookie_valid(
pub fn auth_token_valid(
    our_node: &str,
    subdomain: Option<&ProcessId>,
    cookie: &str,
    auth_token: &str,
    jwt_secret: &[u8],
) -> bool {
    let cookie: Vec<&str> = cookie.split("; ").collect();
    let token: Vec<&str> = auth_token.split("; ").collect();

    let token_label = match subdomain {
        None => format!("kinode-auth_{our_node}"),
@ -48,10 +34,10 @@ pub fn auth_cookie_valid(
    };

    let mut auth_token = None;
    for entry in cookie {
        let cookie_parts: Vec<&str> = entry.split('=').collect();
        if cookie_parts.len() == 2 && cookie_parts[0] == token_label {
            auth_token = Some(cookie_parts[1].to_string());
    for entry in token {
        let token_parts: Vec<&str> = entry.split('=').collect();
        if token_parts.len() == 2 && token_parts[0] == token_label {
            auth_token = Some(token_parts[1].to_string());
            break;
        }
    }
@ -65,10 +51,21 @@ pub fn auth_cookie_valid(
        return false;
    };

    // Verify JWT structure (header.payload.signature) before attempting to decode
    let jwt_format =
        regex::Regex::new(r"^[A-Za-z0-9_-]+\.[A-Za-z0-9_-]+\.[A-Za-z0-9_-]+$").unwrap();
    if !jwt_format.is_match(&auth_token) {
        return false;
    }

    let claims: Result<http_server::JwtClaims, _> = auth_token.verify_with_key(&secret);

    match claims {
        Ok(data) => data.username == our_node && data.subdomain == subdomain.map(|s| s.to_string()),
        Ok(data) => {
            data.username == our_node
                && data.subdomain == subdomain.map(|s| s.to_string())
                && data.expiration > chrono::Utc::now().timestamp() as u64
        }
        Err(_) => false,
    }
}
@ -56,7 +56,7 @@ pub struct RestartBackoff {
    /// how many times has process tried to restart in a row
    consecutive_attempts: u32,
    /// task that will do the restart after wait time has elapsed
    restart_handle: Option<JoinHandle<()>>,
    _restart_handle: Option<JoinHandle<()>>,
}

/// persist kernel's process_map state for next bootup

@ -515,7 +515,7 @@ pub async fn make_process_loop(
        *restart_backoff_lock = Some(RestartBackoff {
            next_soonest_restart_time,
            consecutive_attempts,
            restart_handle,
            _restart_handle: restart_handle,
        });
    }
    // if requests, fire them
@ -99,7 +99,10 @@ impl process::ProcessState {
|
||||
ref expects_response,
|
||||
..
|
||||
}) => {
|
||||
self.last_blob = km.lazy_load_blob;
|
||||
if km.lazy_load_blob.is_some() {
|
||||
self.last_blob = km.lazy_load_blob;
|
||||
km.lazy_load_blob = None;
|
||||
}
|
||||
km.lazy_load_blob = None;
|
||||
if expects_response.is_some() || km.rsvp.is_some() {
|
||||
// update prompting_message iff there is someone to reply to
|
||||
@ -109,13 +112,19 @@ impl process::ProcessState {
|
||||
}
|
||||
t::Message::Response(_) => match self.contexts.remove(&km.id) {
|
||||
Some((context, _timeout_handle)) => {
|
||||
self.last_blob = km.lazy_load_blob;
|
||||
if km.lazy_load_blob.is_some() {
|
||||
self.last_blob = km.lazy_load_blob;
|
||||
km.lazy_load_blob = None;
|
||||
}
|
||||
km.lazy_load_blob = None;
|
||||
self.prompting_message = context.prompting_message;
|
||||
(km, context.context)
|
||||
}
|
||||
None => {
|
||||
self.last_blob = km.lazy_load_blob;
|
||||
if km.lazy_load_blob.is_some() {
|
||||
self.last_blob = km.lazy_load_blob;
|
||||
km.lazy_load_blob = None;
|
||||
}
|
||||
km.lazy_load_blob = None;
|
||||
self.prompting_message = Some(km.clone());
|
||||
(km, None)
|
||||
|
@ -103,8 +103,10 @@ impl process::ProcessState {
|
||||
ref expects_response,
|
||||
..
|
||||
}) => {
|
||||
self.last_blob = km.lazy_load_blob;
|
||||
km.lazy_load_blob = None;
|
||||
if km.lazy_load_blob.is_some() {
|
||||
self.last_blob = km.lazy_load_blob;
|
||||
km.lazy_load_blob = None;
|
||||
}
|
||||
if expects_response.is_some() || km.rsvp.is_some() {
|
||||
// update prompting_message iff there is someone to reply to
|
||||
self.prompting_message = Some(km.clone());
|
||||
@ -113,14 +115,18 @@ impl process::ProcessState {
|
||||
}
|
||||
t::Message::Response(_) => match self.contexts.remove(&km.id) {
|
||||
Some((context, _timeout_handle)) => {
|
||||
self.last_blob = km.lazy_load_blob;
|
||||
km.lazy_load_blob = None;
|
||||
if km.lazy_load_blob.is_some() {
|
||||
self.last_blob = km.lazy_load_blob;
|
||||
km.lazy_load_blob = None;
|
||||
}
|
||||
self.prompting_message = context.prompting_message;
|
||||
(km, context.context)
|
||||
}
|
||||
None => {
|
||||
self.last_blob = km.lazy_load_blob;
|
||||
km.lazy_load_blob = None;
|
||||
if km.lazy_load_blob.is_some() {
|
||||
self.last_blob = km.lazy_load_blob;
|
||||
km.lazy_load_blob = None;
|
||||
}
|
||||
self.prompting_message = Some(km.clone());
|
||||
(km, None)
|
||||
}
|
||||
|
@ -11,6 +11,7 @@ use ring::signature::{self, KeyPair};
use std::{
    net::{IpAddr, Ipv4Addr, Ipv6Addr},
    num::NonZeroU32,
    ops::Add,
};

type DiskKey = [u8; CREDENTIAL_LEN];
@ -64,7 +65,7 @@ pub fn encode_keyfile(
    .unwrap()
}

pub fn decode_keyfile(keyfile: &[u8], password: &str) -> Result<Keyfile, &'static str> {
pub fn decode_keyfile(keyfile: &[u8], password_hash: &str) -> Result<Keyfile, &'static str> {
    use generic_array::GenericArray;

    let (username, routers, salt, key_enc, jwt_enc, file_enc) =
@ -84,7 +85,7 @@ pub fn decode_keyfile(keyfile: &[u8], password: &str) -> Result<Keyfile, &'stati
        PBKDF2_ALG,
        NonZeroU32::new(ITERATIONS).unwrap(),
        &salt,
        password.as_bytes(),
        password_hash.as_bytes(),
        &mut disk_key,
    );

@ -134,10 +135,15 @@ pub fn generate_jwt(
        subdomain => Some(subdomain.to_string()),
    };

    // set JWT to expire in 6 months
    let expiration = chrono::Utc::now()
        .add(chrono::Duration::weeks(26))
        .timestamp() as u64;

    let claims = crate::http::server_types::JwtClaims {
        username: username.to_string(),
        subdomain,
        expiration: 0,
        expiration,
    };

    claims.sign_with_key(&jwt_secret).ok()
@ -830,20 +830,37 @@ async fn login_with_password(
    maybe_rpc: Option<String>,
    password: &str,
) -> (Identity, Vec<u8>, Keyfile) {
    use {
        ring::signature::KeyPair,
        sha2::{Digest, Sha256},
    };
    use argon2::Argon2;
    use ring::signature::KeyPair;

    let disk_keyfile: Vec<u8> = tokio::fs::read(home_directory_path.join(".keys"))
        .await
        .expect("could not read keyfile");

    let password_hash = format!("0x{}", hex::encode(Sha256::digest(password)));
    let (username, _, _, _, _, _) =
        serde_json::from_slice::<(String, Vec<String>, Vec<u8>, Vec<u8>, Vec<u8>, Vec<u8>)>(
            &disk_keyfile,
        )
        .or_else(|_| {
            bincode::deserialize::<(String, Vec<String>, Vec<u8>, Vec<u8>, Vec<u8>, Vec<u8>)>(
                &disk_keyfile,
            )
        })
        .unwrap();

    let provider = Arc::new(register::connect_to_provider(maybe_rpc).await);
    let mut output_key_material = [0u8; 32];
    Argon2::default()
        .hash_password_into(
            password.as_bytes(),
            username.as_bytes(),
            &mut output_key_material,
        )
        .expect("password hashing failed");
    let password_hash = hex::encode(output_key_material);

    let k = keygen::decode_keyfile(&disk_keyfile, &password_hash)
    let password_hash_hex = format!("0x{}", password_hash);

    let k = keygen::decode_keyfile(&disk_keyfile, &password_hash_hex)
        .expect("could not decode keyfile, password incorrect");

    let mut our = Identity {
@ -862,6 +879,8 @@
        },
    };

    let provider = Arc::new(register::connect_to_provider(maybe_rpc).await);

    register::assign_routing(
        &mut our,
        provider,
@ -207,12 +207,16 @@ async fn recv_connection(
&their_id,
)?;

let (peer, peer_rx) = Peer::new(their_id.clone(), their_handshake.proxy_request);
data.peers.insert(their_id.name.clone(), peer).await;
// if we already have a connection to this peer, kill it so we
// don't build a duplicate connection
if let Some(mut peer) = data.peers.get_mut(&their_handshake.name) {
peer.kill();
}

tokio::spawn(utils::maintain_connection(
let (mut peer, peer_rx) = Peer::new(their_id.clone(), their_handshake.proxy_request);
peer.handle = Some(tokio::spawn(utils::maintain_connection(
their_handshake.name,
data.peers,
data.peers.clone(),
PeerConnection {
noise: noise.into_transport_mode()?,
buf,
@ -221,7 +225,8 @@ async fn recv_connection(
peer_rx,
ext.kernel_message_tx,
ext.print_tx,
));
)));
data.peers.insert(their_id.name.clone(), peer).await;
Ok(())
}

@ -322,17 +327,17 @@ pub async fn recv_via_router(
};
match connect_with_handshake_via_router(&ext, &peer_id, &router_id, stream).await {
Ok(connection) => {
let (peer, peer_rx) = Peer::new(peer_id.clone(), false);
data.peers.insert(peer_id.name.clone(), peer).await;
// maintain direct connection
tokio::spawn(utils::maintain_connection(
peer_id.name,
let (mut peer, peer_rx) = Peer::new(peer_id.clone(), false);
peer.handle = Some(tokio::spawn(utils::maintain_connection(
peer_id.name.clone(),
data.peers.clone(),
connection,
peer_rx,
ext.kernel_message_tx,
ext.print_tx,
));
)));
data.peers.insert(peer_id.name, peer).await;
}
Err(e) => {
print_debug(&ext.print_tx, &format!("net: error getting routed: {e}")).await;
@ -175,6 +175,7 @@ pub struct Peer {
/// associated with them. We can send them prompts to establish Passthroughs.
pub routing_for: bool,
pub sender: UnboundedSender<KernelMessage>,
pub handle: Option<tokio::task::JoinHandle<()>>,
/// unix timestamp of last message sent *or* received
pub last_message: u64,
}
@ -189,6 +190,7 @@ impl Peer {
identity,
routing_for,
sender: peer_tx,
handle: None,
last_message: std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.unwrap()
@ -215,6 +217,12 @@ impl Peer {
.unwrap()
.as_secs()
}

pub fn kill(&mut self) {
if let Some(handle) = self.handle.take() {
handle.abort();
}
}
}
/// [`Identity`], with additional fields for networking.
#[derive(Clone)]
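A minimal, self-contained illustration of the pattern these Peer changes enable (hypothetical `PeerEntry` type and a plain `HashMap` standing in for the kernel's peer map; tokio with the `rt` and `macros` features assumed): the connection task's `JoinHandle` is kept on the peer entry so a later duplicate connection can abort the old task before replacing it.

```rust
use std::collections::HashMap;
use tokio::task::JoinHandle;

// Hypothetical stand-in for the kernel's Peer: only the handle field matters here.
struct PeerEntry {
    handle: Option<JoinHandle<()>>,
}

impl PeerEntry {
    // Abort the connection task, if one is still attached.
    fn kill(&mut self) {
        if let Some(handle) = self.handle.take() {
            handle.abort();
        }
    }
}

#[tokio::main]
async fn main() {
    let mut peers: HashMap<String, PeerEntry> = HashMap::new();

    // First inbound connection from "alice.os": spawn its maintenance task
    // and keep the handle alongside the peer entry.
    let task = tokio::spawn(async { /* maintain_connection(...) */ });
    peers.insert("alice.os".into(), PeerEntry { handle: Some(task) });

    // A second connection from the same peer arrives: kill the old task
    // first so only one connection survives, then insert the replacement.
    if let Some(old) = peers.get_mut("alice.os") {
        old.kill();
    }
    let task = tokio::spawn(async { /* maintain_connection(...) */ });
    peers.insert("alice.os".into(), PeerEntry { handle: Some(task) });
}
```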
@ -187,17 +187,17 @@ pub async fn recv_via_router(
};
match connect_with_handshake_via_router(&ext, &peer_id, &router_id, socket).await {
Ok(connection) => {
let (peer, peer_rx) = Peer::new(peer_id.clone(), false);
data.peers.insert(peer_id.name.clone(), peer).await;
// maintain direct connection
tokio::spawn(utils::maintain_connection(
peer_id.name,
let (mut peer, peer_rx) = Peer::new(peer_id.clone(), false);
peer.handle = Some(tokio::spawn(utils::maintain_connection(
peer_id.name.clone(),
data.peers.clone(),
connection,
peer_rx,
ext.kernel_message_tx,
ext.print_tx,
));
)));
data.peers.insert(peer_id.name, peer).await;
}
Err(e) => {
print_debug(&ext.print_tx, &format!("net: error getting routed: {e}")).await;
@ -263,12 +263,16 @@ async fn recv_connection(
&their_id,
)?;

let (peer, peer_rx) = Peer::new(their_id.clone(), their_handshake.proxy_request);
data.peers.insert(their_id.name.clone(), peer).await;
// if we already have a connection to this peer, kill it so we
// don't build a duplicate connection
if let Some(mut peer) = data.peers.get_mut(&their_handshake.name) {
peer.kill();
}

tokio::spawn(utils::maintain_connection(
let (mut peer, peer_rx) = Peer::new(their_id.clone(), their_handshake.proxy_request);
peer.handle = Some(tokio::spawn(utils::maintain_connection(
their_handshake.name,
data.peers,
data.peers.clone(),
PeerConnection {
noise: noise.into_transport_mode()?,
buf,
@ -277,7 +281,8 @@ async fn recv_connection(
peer_rx,
ext.kernel_message_tx,
ext.print_tx,
));
)));
data.peers.insert(their_id.name.clone(), peer).await;
Ok(())
}
@ -16,6 +16,9 @@
<body>
<noscript>You need to enable JavaScript to run this app.</noscript>
<div id="root"></div>
<script src="https://cdnjs.cloudflare.com/ajax/libs/argon2-browser/1.18.0/argon2-bundled.min.js"
integrity="sha512-Alrh8vbmKDc5xiq7I/y8LTDwy9nw1nT9S/yR73HMMoWrpX4S1kizNPdWM896c/CDIGILNwAiaih627A94kRhYQ=="
crossorigin="anonymous" referrerpolicy="no-referrer"></script>
<script type="module" src="/src/index.tsx"></script>
</body>
6
kinode/src/register-ui/package-lock.json
generated
@ -12,6 +12,7 @@
"@rainbow-me/rainbowkit": "^2.1.6",
"@szhsin/react-menu": "^4.1.0",
"@tanstack/react-query": "^5.45.1",
"argon2-browser": "^1.18.0",
"idna-uts46-hx": "^6.0.4",
"is-valid-domain": "^0.1.6",
"jazzicon": "^1.5.0",
@ -6390,6 +6391,11 @@
"dev": true,
"license": "MIT"
},
"node_modules/argon2-browser": {
"version": "1.18.0",
"resolved": "https://registry.npmjs.org/argon2-browser/-/argon2-browser-1.18.0.tgz",
"integrity": "sha512-ImVAGIItnFnvET1exhsQB7apRztcoC5TnlSqernMJDUjbc/DLq3UEYeXFrLPrlaIl8cVfwnXb6wX2KpFf2zxHw=="
},
"node_modules/argparse": {
"version": "1.0.10",
"resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
@ -8,6 +8,7 @@
"@rainbow-me/rainbowkit": "^2.1.6",
"@szhsin/react-menu": "^4.1.0",
"@tanstack/react-query": "^5.45.1",
"argon2-browser": "^1.18.0",
"idna-uts46-hx": "^6.0.4",
"is-valid-domain": "^0.1.6",
"jazzicon": "^1.5.0",
@ -60,4 +61,4 @@
"vite": "^5.2.10"
},
"type": "module"
}
}
@ -6,7 +6,6 @@ import {
} from "react";
import { PageProps } from "../lib/types";
import Loader from "../components/Loader";
import { sha256, toBytes } from "viem";
import { redirectToHomepage } from "../utils/redirect-to-homepage";

interface ImportKeyfileProps extends PageProps { }
@ -24,6 +23,7 @@ function ImportKeyfile({
const [pwVet, _setPwVet] = useState<boolean>(false);
const [pwDebounced, _setPwDebounced] = useState<boolean>(false);
const [loading, setLoading] = useState<boolean>(false);
const [knsName, setKnsName] = useState<string>("");

useEffect(() => {
document.title = "Import Keyfile";
@ -55,26 +55,30 @@ function ImportKeyfile({

try {
if (keyErrs.length === 0 && localKey !== null) {
let hashed_password = sha256(toBytes(pw));
argon2.hash({ pass: pw, salt: knsName, hashLen: 32, time: 2, mem: 19456, type: argon2.ArgonType.Argon2id }).then(async h => {
const hashed_password_hex = `0x${h.hashHex}`;

const result = await fetch("/import-keyfile", {
method: "POST",
credentials: 'include',
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
keyfile: Buffer.from(localKey).toString('utf8'),
password_hash: hashed_password,
}),
const result = await fetch("/import-keyfile", {
method: "POST",
credentials: 'include',
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
keyfile: Buffer.from(localKey).toString('utf8'),
password_hash: hashed_password_hex,
}),
});

if (result.status > 399) {
throw new Error(await result.text());
}
redirectToHomepage();
}).catch(err => {
window.alert(String(err));
setLoading(false);
});

if (result.status > 399) {
throw new Error("Incorrect password");
}
redirectToHomepage();

}
} catch {
window.alert("An error occurred, please try again.");
} catch (err) {
window.alert(String(err));
setLoading(false);
}
},
@ -103,8 +107,19 @@ function ImportKeyfile({
</span>
</label>
{localKeyFileName && <p className="mt-2">{localKeyFileName}</p>}
</div> <div className="form-group">
<h4 className="form-label">2. Enter Password</h4>
</div>
<div className="form-group">
<h4 className="form-label">2. Enter Node ID</h4>
<label className="name-input-label">
<input
type="text"
className="name-input"
onChange={(e) => setKnsName(e.target.value)}
/>
</label>
</div>
<div className="form-group">
<h4 className="form-label">3. Enter Password</h4>
<input
type="password"
id="password"
@ -2,7 +2,6 @@ import { FormEvent, useCallback, useEffect, useState } from "react";
import { PageProps, UnencryptedIdentity } from "../lib/types";
import Loader from "../components/Loader";
import { useNavigate } from "react-router-dom";
import { sha256, toBytes } from "viem";
import { Tooltip } from "../components/Tooltip";
import { redirectToHomepage } from "../utils/redirect-to-homepage";

@ -40,9 +39,9 @@ function Login({
e?.preventDefault();
e?.stopPropagation();

try {
setLoading("Logging in...");
let hashed_password = sha256(toBytes(pw));
setLoading("Logging in...");
argon2.hash({ pass: pw, salt: knsName, hashLen: 32, time: 2, mem: 19456, type: argon2.ArgonType.Argon2id }).then(async h => {
const hashed_password_hex = `0x${h.hashHex}`;

const result = await fetch(
"/login",
@ -50,7 +49,7 @@
method: "POST",
credentials: 'include',
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ password_hash: hashed_password }),
body: JSON.stringify({ password_hash: hashed_password_hex }),
}
);

@ -58,11 +57,10 @@
throw new Error(await result.text());
}
redirectToHomepage();

} catch (err: any) {
}).catch(err => {
setKeyErrs([String(err)]);
setLoading("");
}
});
},
[pw]
);
@ -2,7 +2,6 @@ import React, { useState, useEffect, FormEvent, useCallback } from "react";
import Loader from "../components/Loader";
import { downloadKeyfile } from "../utils/download-keyfile";
import { Tooltip } from "../components/Tooltip";
import { sha256, toBytes } from "viem";
import { useSignTypedData, useAccount, useChainId } from 'wagmi'
import { KIMAP } from "../abis";
import { redirectToHomepage } from "../utils/redirect-to-homepage";
@ -51,63 +50,68 @@ function SetPassword({

setTimeout(async () => {
setLoading(true);
let hashed_password = sha256(toBytes(pw));
let owner = address;
let timestamp = Date.now();
argon2.hash({ pass: pw, salt: knsName, hashLen: 32, time: 2, mem: 19456, type: argon2.ArgonType.Argon2id }).then(async h => {
const hashed_password_hex = `0x${h.hashHex}` as `0x${string}`;
let owner = address;
let timestamp = Date.now();

const signature = await signTypedDataAsync({
domain: {
name: "Kimap",
version: "1",
chainId: chainId,
verifyingContract: KIMAP,
},
types: {
Boot: [
{ name: 'username', type: 'string' },
{ name: 'password_hash', type: 'bytes32' },
{ name: 'timestamp', type: 'uint256' },
{ name: 'direct', type: 'bool' },
{ name: 'reset', type: 'bool' },
{ name: 'chain_id', type: 'uint256' },
],
},
primaryType: 'Boot',
message: {
username: knsName,
password_hash: hashed_password,
timestamp: BigInt(timestamp),
direct,
reset,
chain_id: BigInt(chainId),
},
})

try {
const result = await fetch("/boot", {
method: "POST",
headers: { "Content-Type": "application/json" },
credentials: "include",
body: JSON.stringify({
password_hash: hashed_password,
reset,
const signature = await signTypedDataAsync({
domain: {
name: "Kimap",
version: "1",
chainId: chainId,
verifyingContract: KIMAP,
},
types: {
Boot: [
{ name: 'username', type: 'string' },
{ name: 'password_hash', type: 'bytes32' },
{ name: 'timestamp', type: 'uint256' },
{ name: 'direct', type: 'bool' },
{ name: 'reset', type: 'bool' },
{ name: 'chain_id', type: 'uint256' },
],
},
primaryType: 'Boot',
message: {
username: knsName,
password_hash: hashed_password_hex,
timestamp: BigInt(timestamp),
direct,
owner,
timestamp,
signature,
chain_id: chainId,
}),
reset,
chain_id: BigInt(chainId),
},
});
const base64String = await result.json();

downloadKeyfile(knsName, base64String);
redirectToHomepage();
try {
const result = await fetch("/boot", {
method: "POST",
headers: { "Content-Type": "application/json" },
credentials: "include",
body: JSON.stringify({
password_hash: hashed_password_hex,
reset,
username: knsName,
direct,
owner,
timestamp,
signature,
chain_id: chainId,
}),
});
const base64String = await result.json();

} catch {
alert("There was an error setting your password, please try again.");
downloadKeyfile(knsName, base64String);
redirectToHomepage();

} catch {
alert("There was an error setting your password, please try again.");
setLoading(false);
}
}).catch(err => {
alert(String(err));
setLoading(false);
}
});
}, 500);
},
[direct, pw, pw2, reset, knsName]
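The hunk above signs an EIP-712 `Boot` struct in the browser; the register backend (see the `alloy_sol_types::{eip712_domain, SolCall, SolStruct}` import in the next file's diff) verifies a signature over the same shape. Below is a hedged sketch of how that struct and its signing hash can be expressed with alloy-sol-types; the crate version, the example chain id, the sample values, and the zero verifying-contract address are all assumptions, not the real KIMAP parameters.

```rust
use alloy_primitives::{address, b256, U256};
use alloy_sol_types::{eip712_domain, sol, SolStruct};

sol! {
    // Same field names and order as the typed data signed in the UI above.
    struct Boot {
        string username;
        bytes32 password_hash;
        uint256 timestamp;
        bool direct;
        bool reset;
        uint256 chain_id;
    }
}

fn main() {
    // Placeholder domain values: the real verifying contract is the KIMAP
    // address the UI imports, and the chain id comes from the wallet.
    let domain = eip712_domain! {
        name: "Kimap",
        version: "1",
        chain_id: 10,
        verifying_contract: address!("0000000000000000000000000000000000000000"),
    };

    let boot = Boot {
        username: "helloworld.os".to_string(),
        password_hash: b256!("1111111111111111111111111111111111111111111111111111111111111111"),
        timestamp: U256::from(1_700_000_000u64),
        direct: false,
        reset: false,
        chain_id: U256::from(10u64),
    };

    // 32-byte digest the wallet signature is produced over / checked against.
    println!("{}", boot.eip712_signing_hash(&domain));
}
```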
@ -9,8 +9,7 @@ use alloy_primitives::{Address as EthAddress, Bytes, FixedBytes, U256};
use alloy_sol_types::{eip712_domain, SolCall, SolStruct};
use base64::{engine::general_purpose::STANDARD as base64_standard, Engine};
use lib::types::core::{
BootInfo, Identity, ImportKeyfileInfo, Keyfile, LoginAndResetInfo, LoginInfo, NodeRouting,
UnencryptedIdentity,
BootInfo, Identity, ImportKeyfileInfo, Keyfile, LoginInfo, NodeRouting, UnencryptedIdentity,
};
use ring::{rand::SystemRandom, signature, signature::KeyPair};
use std::{
@ -68,8 +67,7 @@ pub async fn register(
}

// This is a **temporary** identity, passed to the UI.
// If it is confirmed through a /boot or /confirm-change-network-keys,
// then it will be used to replace the current identity.
// If it is confirmed through a /boot, then it will be used to replace the current identity.
let our_temp_id = Arc::new(Identity {
networking_key: format!("0x{}", public_key),
name: "".to_string(),
@ -217,16 +215,6 @@ pub async fn register(
login_provider,
)
}),
))
.or(warp::path("confirm-change-network-keys").and(
warp::post()
.and(warp::body::content_length_limit(1024 * 16))
.and(warp::body::json())
.and(tx)
.and(our_temp_id)
.and(net_keypair)
.and(keyfile)
.and_then(confirm_change_network_keys),
));

let mut headers = HeaderMap::new();
@ -495,7 +483,7 @@ async fn handle_import_keyfile(
}
Err(_) => {
return Ok(warp::reply::with_status(
warp::reply::json(&"Incorrect password_hash".to_string()),
warp::reply::json(&"Incorrect password!".to_string()),
StatusCode::UNAUTHORIZED,
)
.into_response())
@ -555,7 +543,7 @@ async fn handle_login(
}
Err(_) => {
return Ok(warp::reply::with_status(
warp::reply::json(&"Incorrect password_hash"),
warp::reply::json(&"Incorrect password!"),
StatusCode::UNAUTHORIZED,
)
.into_response())
@ -574,72 +562,6 @@ async fn handle_login(
success_response(sender, our, decoded_keyfile, encoded_keyfile).await
}

async fn confirm_change_network_keys(
info: LoginAndResetInfo,
sender: Arc<RegistrationSender>,
our: Arc<Identity>,
networking_keypair: Arc<Vec<u8>>,
encoded_keyfile: Option<Vec<u8>>,
) -> Result<impl Reply, Rejection> {
if encoded_keyfile.is_none() {
return Ok(warp::reply::with_status(
warp::reply::json(&"Keyfile not present"),
StatusCode::NOT_FOUND,
)
.into_response());
}
let encoded_keyfile = encoded_keyfile.unwrap();
let mut our = our.as_ref().clone();

// Get our name from our current keyfile
let old_decoded_keyfile = match keygen::decode_keyfile(&encoded_keyfile, &info.password_hash) {
Ok(k) => {
our.name = k.username.clone();
k
}
Err(_) => {
return Ok(warp::reply::with_status(
warp::reply::json(&"Invalid password"),
StatusCode::UNAUTHORIZED,
)
.into_response());
}
};

// Determine if direct node or not

if info.direct {
our.both_to_direct();
} else {
our.both_to_routers();
}

let decoded_keyfile = Keyfile {
username: our.name.clone(),
routers: our.routers().unwrap_or(&vec![]).clone(),
networking_keypair: signature::Ed25519KeyPair::from_pkcs8(networking_keypair.as_ref())
.unwrap(),
jwt_secret_bytes: old_decoded_keyfile.jwt_secret_bytes,
file_key: old_decoded_keyfile.file_key,
};

let encoded_keyfile = keygen::encode_keyfile(
info.password_hash,
decoded_keyfile.username.clone(),
decoded_keyfile.routers.clone(),
&networking_keypair,
&decoded_keyfile.jwt_secret_bytes,
&decoded_keyfile.file_key,
);

our.networking_key = format!(
"0x{}",
hex::encode(decoded_keyfile.networking_keypair.public_key().as_ref())
);

success_response(sender, our, decoded_keyfile, encoded_keyfile).await
}

pub async fn assign_routing(
our: &mut Identity,
provider: Arc<RootProvider<PubSubFrontend>>,
@ -773,6 +695,15 @@ async fn success_response(
match HeaderValue::from_str(&format!("kinode-auth_{}={token};", our.name)) {
Ok(v) => {
response.headers_mut().append(SET_COOKIE, v);
response
.headers_mut()
.append("HttpOnly", HeaderValue::from_static("true"));
response
.headers_mut()
.append("Secure", HeaderValue::from_static("true"));
response
.headers_mut()
.append("SameSite", HeaderValue::from_static("Strict"));
}
Err(_) => {
return Ok(warp::reply::with_status(
@ -343,7 +343,8 @@ async fn handle_request(

// current prepend to filepaths needs to be: /package_id/drive/path
let (package_id, drive, rest) = parse_package_and_drive(&request.path, &vfs_path)?;
let drive = format!("{package_id}/{drive}");
// must have prepended `/` here or else it messes up caps downstream, e.g. in run-tests
let drive = format!("/{package_id}/{drive}");
let action = request.action;
let path = PathBuf::from(&request.path);
@ -1085,12 +1085,6 @@ pub struct LoginInfo {
pub subdomain: Option<String>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LoginAndResetInfo {
pub password_hash: String,
pub direct: bool,
}

#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Identity {
pub name: NodeId,
@ -102,18 +102,13 @@ pub enum HttpServerAction {
WebSocketBind {
path: String,
authenticated: bool,
encrypted: bool,
extension: bool,
},
/// SecureBind is the same as Bind, except that it forces new connections to be made
/// from the unique subdomain of the process that bound the path. These are *always*
/// authenticated. Since the subdomain is unique, it will require the user to be
/// logged in separately to the general domain authentication.
WebSocketSecureBind {
path: String,
encrypted: bool,
extension: bool,
},
WebSocketSecureBind { path: String, extension: bool },
/// Unbind a previously-bound WebSocket path
WebSocketUnbind { path: String },
/// Processes will RECEIVE this kind of request when a client connects to them.
@ -184,21 +179,17 @@ pub enum HttpServerError {
}

/// Structure sent from client websocket to this server upon opening a new connection.
/// After this is sent, depending on the `encrypted` flag, the channel will either be
/// open to send and receive plaintext messages or messages encrypted with a symmetric
/// key derived from the JWT.
/// After this is sent the channel will be open to send and receive plaintext messages.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct WsRegister {
pub auth_token: String,
pub target_process: String,
pub encrypted: bool, // TODO symmetric key exchange here if true
}

/// Structure sent from this server to client websocket upon opening a new connection.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct WsRegisterResponse {
pub channel_id: u32,
// TODO symmetric key exchange here
}

#[derive(Debug, Serialize, Deserialize)]
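For context, a hedged sketch of the simplified WebSocket bind variants as they read after this change, mirrored as a local enum (the real type lives in the lib crate and carries more variants; its serde attributes may differ). It only shows the JSON shape such a request takes under serde's default externally-tagged representation.

```rust
use serde::{Deserialize, Serialize};

// Local mirror of two variants from the diff above; not the kernel's real type.
#[derive(Clone, Debug, Serialize, Deserialize)]
enum HttpServerAction {
    WebSocketSecureBind { path: String, extension: bool },
    WebSocketUnbind { path: String },
}

fn main() {
    let bind = HttpServerAction::WebSocketSecureBind {
        path: "/ws".to_string(),
        extension: false,
    };
    // prints: {"WebSocketSecureBind":{"path":"/ws","extension":false}}
    println!("{}", serde_json::to_string(&bind).unwrap());
}
```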
@ -75,7 +75,7 @@ fn build_and_zip_package(
}

fn main() -> anyhow::Result<()> {
let matches = Command::new("build_packages")
let matches = Command::new("build-packages")
.about("Build the core Kinode packages.")
.arg(
Arg::new("FEATURES")
@ -97,8 +97,8 @@ fn main() -> anyhow::Result<()> {
)
.get_matches();

// kinode/target/debug/build_package
let current_exe_dir = std::env::current_exe() // build_package
// kinode/target/debug/build-package
let current_exe_dir = std::env::current_exe() // build-package
.unwrap();
let top_level_dir = current_exe_dir
.parent() // debug/
@ -33,7 +33,7 @@ def build_and_move(feature, tmp_dir, architecture, os_name):
if feature:
release_env["PATH_TO_PACKAGES_ZIP"] = f"../target/packages-{feature}.zip"
subprocess.run(
["cargo", "run", "-p", "build_packages", "--", "--features", feature],
["cargo", "run", "-p", "build-packages", "--", "--features", feature],
check=True,
#stdout=subprocess.PIPE,
#stderr=subprocess.PIPE,
@ -47,7 +47,7 @@ def build_and_move(feature, tmp_dir, architecture, os_name):
)
zip_name = f"{zip_prefix}-{feature}.zip"
else:
subprocess.run(["cargo", "run", "-p", "build_packages"], check=True)
subprocess.run(["cargo", "run", "-p", "build-packages"], check=True)
subprocess.run(
["cargo", "build", "--release", "-p", "kinode"],
check=True,
@ -60,7 +60,7 @@ def build_and_move(feature, tmp_dir, architecture, os_name):
source_path = f"target/release/{binary_name}"
dest_path = os.path.join(tmp_dir, binary_name)
shutil.move(source_path, dest_path)
os.chmod(dest_path, 0o775)
os.chmod(dest_path, 0o644)

# Create a zip archive of the binary
zip_path = os.path.join(tmp_dir, zip_name)
@ -68,7 +68,7 @@ def main():
subprocess.check_call([f'./{build_script_name}'], cwd=build_script_dir)

# Run cargo build
subprocess.check_call(['cargo', 'build', '-p', 'build_packages'], cwd=repo_root)
subprocess.check_call(['cargo', 'build', '-p', 'build-packages'], cwd=repo_root)

# Create the zip file
output_zip = args.output