Mirror of https://github.com/uqbar-dao/nectar.git, synced 2024-12-22 16:11:38 +03:00

Merge branch 'develop' into hf/add-docker-build-images

Commit 5061ea860e

Cargo.lock (generated): 22 lines changed
@@ -2414,18 +2414,6 @@ dependencies = [
 "winapi",
]

[[package]]
name = "docs"
version = "0.1.0"
dependencies = [
 "anyhow",
 "kinode_process_lib 0.9.1",
 "process_macros",
 "serde",
 "serde_json",
 "wit-bindgen",
]

[[package]]
name = "download"
version = "0.1.0"
@@ -3651,7 +3639,7 @@ dependencies = [

[[package]]
name = "kinode"
version = "0.9.4"
version = "0.9.5"
dependencies = [
 "aes-gcm",
 "alloy 0.2.1",
@@ -3712,7 +3700,7 @@ dependencies = [

[[package]]
name = "kinode_lib"
version = "0.9.4"
version = "0.9.5"
dependencies = [
 "lib",
]
@@ -3764,8 +3752,8 @@ dependencies = [

[[package]]
name = "kit"
version = "0.7.4"
source = "git+https://github.com/kinode-dao/kit?tag=v0.7.4#a995659fd1a41501d893cacf032175f343796096"
version = "0.7.6"
source = "git+https://github.com/kinode-dao/kit?tag=v0.7.6#da6676d79dfdaf47c00ce1d24874fd52de44f717"
dependencies = [
 "alloy 0.1.4",
 "alloy-sol-macro",
@@ -3835,7 +3823,7 @@ checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67"

[[package]]
name = "lib"
version = "0.9.4"
version = "0.9.5"
dependencies = [
 "alloy 0.2.1",
 "kit",
@@ -1,7 +1,7 @@
[package]
name = "kinode_lib"
authors = ["KinodeDAO"]
version = "0.9.4"
version = "0.9.5"
edition = "2021"
description = "A general-purpose sovereign cloud computing platform"
homepage = "https://kinode.org"
@@ -17,7 +17,6 @@ members = [
    "kinode/packages/app_store/app_store", "kinode/packages/app_store/ft_worker",
    "kinode/packages/app_store/download", "kinode/packages/app_store/install", "kinode/packages/app_store/uninstall", "kinode/packages/app_store/downloads", "kinode/packages/app_store/chain",
    "kinode/packages/chess/chess",
    "kinode/packages/docs/docs",
    "kinode/packages/homepage/homepage",
    "kinode/packages/kino_updates/blog", "kinode/packages/kino_updates/globe",
    "kinode/packages/kns_indexer/kns_indexer", "kinode/packages/kns_indexer/get_block", "kinode/packages/kns_indexer/state",
@@ -120,7 +120,7 @@ The `sys` publisher is not a real node ID, but it's also not a special case valu
- CTRL+J to toggle debug mode
- CTRL+S to step through events in debug mode

- CTRL+L to toggle logging mode, which writes all terminal output to the `.terminal_log` file. Off by default, this will write all events and verbose prints with timestamps.
- CTRL+L to toggle logging mode, which writes all terminal output to the `.terminal_log` file. On by default, this will write all events and verbose prints with timestamps.

- CTRL+A to jump to beginning of input
- CTRL+E to jump to end of input

entitlements.plist (new file): 12 lines
@@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
    <!-- Required for running Kinode binary on macOS. Do I know why? Absolutely not.
    Do not touch without testing after notarizing, changes break things unexpectedly.-->
    <key>com.apple.security.cs.allow-jit</key>
    <true/>
    <key>com.apple.security.cs.allow-unsigned-executable-memory</key>
    <true/>
</dict>
</plist>
@@ -1,7 +1,7 @@
[package]
name = "kinode"
authors = ["KinodeDAO"]
version = "0.9.4"
version = "0.9.5"
edition = "2021"
description = "A general-purpose sovereign cloud computing platform"
homepage = "https://kinode.org"
@@ -15,7 +15,7 @@ path = "src/main.rs"
[build-dependencies]
anyhow = "1.0.71"
flate2 = "1.0"
kit = { git = "https://github.com/kinode-dao/kit", tag = "v0.7.4" }
kit = { git = "https://github.com/kinode-dao/kit", tag = "v0.7.6" }
tar = "0.4"
tokio = "1.28"
walkdir = "2.4"
@@ -75,39 +75,6 @@ fn untar_gz_file(path: &Path, dest: &Path) -> std::io::Result<()> {
    Ok(())
}

/// fetch .tar.gz of kinode book for docs app
fn get_kinode_book(packages_dir: &Path) -> anyhow::Result<()> {
    p!("fetching kinode book .tar.gz");
    let rt = tokio::runtime::Runtime::new().unwrap();
    rt.block_on(async {
        let releases = kit::boot_fake_node::fetch_releases("kinode-dao", "kinode-book")
            .await
            .map_err(|e| anyhow::anyhow!("{e:?}"))?;
        if releases.is_empty() {
            return Err(anyhow::anyhow!("couldn't retrieve kinode-book releases"));
        }
        let release = &releases[0];
        if release.assets.is_empty() {
            return Err(anyhow::anyhow!(
                "most recent kinode-book release has no assets"
            ));
        }
        let release_url = format!(
            "https://github.com/kinode-dao/kinode-book/releases/download/{}/{}",
            release.tag_name, release.assets[0].name,
        );
        let book_dir = packages_dir.join("docs").join("pkg").join("ui");
        fs::create_dir_all(&book_dir)?;
        let book_tar_path = book_dir.join("book.tar.gz");
        kit::build::download_file(&release_url, &book_tar_path)
            .await
            .map_err(|e| anyhow::anyhow!("{e:?}"))?;
        untar_gz_file(&book_tar_path, &book_dir)?;
        fs::remove_file(book_tar_path)?;
        Ok(())
    })
}

fn build_and_zip_package(
    entry_path: PathBuf,
    parent_pkg_path: &str,
@@ -198,12 +165,6 @@ fn main() -> anyhow::Result<()> {
        }
    }

    if std::env::var("SKIP_BOOK").is_ok() {
        p!("skipping book build");
    } else {
        get_kinode_book(&packages_dir)?;
    }

    output_reruns(&packages_dir);

    let features = get_features();
@@ -88,6 +88,7 @@ fn make_widget() -> String {
    height: 100vh;
    width: 100vw;
    overflow-y: auto;
    padding-bottom: 30px;
}

.app {

kinode/packages/docs/.gitignore (vendored): 8 lines changed
@@ -1,8 +0,0 @@
*/target/
/target
pkg/*.wasm
*.swp
*.swo
*/wasi_snapshot_preview1.wasm
*/wit/
*/process_env

kinode/packages/docs/Cargo.lock (generated): 3228 lines changed
File diff suppressed because it is too large
@@ -1,10 +0,0 @@
[workspace]
resolver = "2"
members = [
    "docs",
]

[profile.release]
panic = "abort"
opt-level = "s"
lto = true
@@ -1,18 +0,0 @@
[package]
name = "docs"
version = "0.1.0"
edition = "2021"

[dependencies]
anyhow = "1.0"
kinode_process_lib = "0.9.1"
process_macros = { git = "https://github.com/kinode-dao/process_macros", rev = "626e501" }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
wit-bindgen = "0.24.0"

[lib]
crate-type = ["cdylib"]

[package.metadata.component]
package = "kinode:process"
File diff suppressed because one or more lines are too long
@@ -1,94 +0,0 @@
use kinode_process_lib::{
    await_message, call_init, homepage, http, println, vfs, Address, LazyLoadBlob,
};

wit_bindgen::generate!({
    path: "target/wit",
    world: "process-v0",
});

const ICON: &str = include_str!("icon");

call_init!(init);
fn init(our: Address) {
    println!("begin");

    let mut server = http::server::HttpServer::new(5);
    // Serve the docs book dynamically from /docs:docs:sys/
    server
        .bind_http_path("/", http::server::HttpBindingConfig::default())
        .expect("failed to bind /");

    homepage::add_to_homepage("Docs", Some(ICON), Some("index.html"), None);

    loop {
        match await_message() {
            Err(send_error) => println!("got SendError: {send_error}"),
            Ok(ref message) => {
                // handle http requests
                // no need to validate source since capabilities limit to vfs/http_server
                let Ok(request) = server.parse_request(message.body()) else {
                    continue;
                };

                server.handle_request(
                    request,
                    |incoming| {
                        // client frontend sent an HTTP request, process it and
                        // return an HTTP response
                        // these functions can reuse the logic from handle_local_request
                        // after converting the request into the appropriate format!
                        match incoming.method().unwrap_or_default() {
                            http::Method::GET => {
                                // serve the page they requested
                                match vfs::File::new(
                                    format!(
                                        "{}/pkg/ui{}",
                                        our.package_id(),
                                        incoming.path().unwrap_or_default()
                                    ),
                                    5,
                                )
                                .read()
                                {
                                    Ok(file) => {
                                        let mime_type = format!(
                                            "text/{}",
                                            incoming
                                                .path()
                                                .unwrap_or_default()
                                                .split('.')
                                                .last()
                                                .unwrap_or("plain")
                                        );
                                        (
                                            http::server::HttpResponse::new(http::StatusCode::OK)
                                                .header("Content-Type", mime_type),
                                            Some(LazyLoadBlob::new(None::<String>, file)),
                                        )
                                    }
                                    Err(e) => (
                                        http::server::HttpResponse::new(
                                            http::StatusCode::NOT_FOUND,
                                        )
                                        .header("Content-Type", "text/html"),
                                        Some(LazyLoadBlob::new(None::<String>, e.to_string())),
                                    ),
                                }
                            }
                            _ => (
                                http::server::HttpResponse::new(
                                    http::StatusCode::METHOD_NOT_ALLOWED,
                                ),
                                None,
                            ),
                        }
                    },
                    |_channel_id, _message_type, _message| {
                        // client frontend sent a websocket message, ignore
                    },
                )
            }
        }
    }
}
@@ -1,18 +0,0 @@
{
    "name": "docs",
    "description": "",
    "image": "",
    "properties": {
        "package_name": "docs",
        "current_version": "0.1.0",
        "publisher": "sys",
        "mirrors": [],
        "code_hashes": {
            "0.1.0": ""
        },
        "wit_version": 0,
        "dependencies": []
    },
    "external_url": "",
    "animation_url": ""
}
@@ -1,18 +0,0 @@
[
    {
        "process_name": "docs",
        "process_wasm_path": "/docs.wasm",
        "on_exit": "Restart",
        "request_networking": false,
        "request_capabilities": [
            "homepage:homepage:sys",
            "http_server:distro:sys",
            "vfs:distro:sys"
        ],
        "grant_capabilities": [
            "http_server:distro:sys",
            "vfs:distro:sys"
        ],
        "public": false
    }
]
@@ -1,10 +1,9 @@
#![feature(let_chains)]
use crate::kinode::process::homepage::{AddRequest, Request as HomepageRequest};
use kinode_process_lib::{
    await_message, call_init, get_blob, http, http::server, println, Address, LazyLoadBlob,
};
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
use std::collections::{BTreeMap, HashMap};

/// Fetching OS version from main package
const CARGO_TOML: &str = include_str!("../../../../Cargo.toml");
@@ -26,9 +25,11 @@ struct HomepageApp {
    base64_icon: Option<String>,
    widget: Option<String>,
    order: u32,
    favorite: bool,
    favorite: bool, // **not currently used on frontend**
}

type PersistedAppOrder = HashMap<String, u32>;

wit_bindgen::generate!({
    path: "target/wit",
    world: "homepage-sys-v0",
@@ -162,6 +163,11 @@ fn init(our: Address) {
        .bind_http_path("/order", http_config)
        .expect("failed to bind /order");

    // load persisted app order
    let mut persisted_app_order =
        kinode_process_lib::get_typed_state(|bytes| serde_json::from_slice(bytes))
            .unwrap_or(PersistedAppOrder::new());

    loop {
        let Ok(ref message) = await_message() else {
            // we never send requests, so this will never happen
@@ -238,11 +244,15 @@ fn init(our: Address) {
                    None,
                );
            };
            for (app_id, order) in order_list {
                if let Some(app) = app_data.get_mut(&app_id) {
                    app.order = order;
            for (app_id, order) in &order_list {
                if let Some(app) = app_data.get_mut(app_id) {
                    app.order = *order;
                }
            }
            persisted_app_order = order_list.into_iter().collect();
            kinode_process_lib::set_state(
                &serde_json::to_vec(&persisted_app_order).unwrap(),
            );
            (server::HttpResponse::new(http::StatusCode::OK), None)
        }
        _ => (server::HttpResponse::new(http::StatusCode::NOT_FOUND), None),
@@ -264,10 +274,11 @@ fn init(our: Address) {
            path,
            widget,
        }) => {
            let id = message.source().process.to_string();
            app_data.insert(
                message.source().process.to_string(),
                id.clone(),
                HomepageApp {
                    id: message.source().process.to_string(),
                    id: id.clone(),
                    process: message.source().process().to_string(),
                    package: message.source().package().to_string(),
                    publisher: message.source().publisher().to_string(),
@@ -281,14 +292,20 @@ fn init(our: Address) {
                    label,
                    base64_icon: icon,
                    widget,
                    order: app_data.len() as u32,
                    order: if let Some(order) = persisted_app_order.get(&id) {
                        *order
                    } else {
                        app_data.len() as u32
                    },
                    favorite: DEFAULT_FAVES
                        .contains(&message.source().process.to_string().as_str()),
                },
            );
        }
        HomepageRequest::Remove => {
            app_data.remove(&message.source().process.to_string());
            let id = message.source().process.to_string();
            app_data.remove(&id);
            persisted_app_order.remove(&id);
        }
        HomepageRequest::SetStylesheet(new_stylesheet_string) => {
            // ONLY settings:settings:sys may call this request
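The hunks above add persistence for the user's app ordering. A minimal sketch of that round trip, using the same kinode_process_lib calls and the PersistedAppOrder alias introduced in this diff (the free-standing helper functions are illustrative, not part of the commit):

    use std::collections::HashMap;

    type PersistedAppOrder = HashMap<String, u32>;

    // Restore the saved ordering; fall back to an empty map on first boot
    // or if the stored bytes fail to deserialize.
    fn load_order() -> PersistedAppOrder {
        kinode_process_lib::get_typed_state(|bytes| serde_json::from_slice(bytes))
            .unwrap_or(PersistedAppOrder::new())
    }

    // Persist the ordering again after a POST to /order or a Remove request.
    fn save_order(order: &PersistedAppOrder) {
        kinode_process_lib::set_state(&serde_json::to_vec(order).unwrap());
    }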
@@ -1,14 +1,13 @@
import React, { useState, useEffect, useMemo } from "react";
import useHomepageStore from "../store/homepageStore";
import usePersistentStore from "../store/persistentStore";
import useHomepageStore, { HomepageApp } from "../store/homepageStore";
import AppDisplay from "./AppDisplay";

const AllApps: React.FC = () => {
  const { apps } = useHomepageStore();
  const { appOrder, setAppOrder } = usePersistentStore();
  const [expanded, setExpanded] = useState(false);
  const [isMobile, setIsMobile] = useState(false);
  const [visibleApps, setVisibleApps] = useState(5);
  const [orderedApps, setOrderedApps] = useState<HomepageApp[]>([]);
  const [draggedIndex, setDraggedIndex] = useState<number | null>(null);
  const [dragOverIndex, setDragOverIndex] = useState<number | null>(null);

@@ -26,13 +25,14 @@ const AllApps: React.FC = () => {

  // Sort apps based on persisted order
  const sortedApps = useMemo(() => {
    const orderedApps = [...apps].sort((a, b) => {
      return appOrder.indexOf(a.id) - appOrder.indexOf(b.id);
    if (!orderedApps.length) {
      setOrderedApps(apps);
    }
    const o = [...orderedApps].sort((a, b) => {
      return a.order - b.order;
    });
    // Ensure all apps are included in the order
    const missingApps = apps.filter((app) => !appOrder.includes(app.id));
    return [...orderedApps, ...missingApps];
  }, [apps, appOrder]);
    return o.filter(app => app.path !== null);
  }, [orderedApps, apps]);

  const displayedApps = expanded
    ? sortedApps
@@ -67,29 +67,43 @@ const AllApps: React.FC = () => {
    const [movedApp] = newSortedApps.splice(dragIndex, 1);
    newSortedApps.splice(dropIndex, 0, movedApp);

    const newAppOrder = newSortedApps.map((app) => app.id);
    setAppOrder(newAppOrder);
    const updatedApps = newSortedApps.map((app, index) => ({
      ...app,
      order: index
    }));

    setOrderedApps(updatedApps);

    // Sync the order with the backend
    fetch('/order', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json'
      },
      credentials: 'include',
      body: JSON.stringify(newSortedApps.map((app, index) => [app.id, index]))
    });

    handleDragEnd();
  };

  return (
    <div id="all-apps" className={isMobile ? "mobile" : ""}>
      <div
        className={`apps-grid ${expanded ? "expanded" : ""} ${
          isMobile ? "mobile" : ""
        }`}
        className={`apps-grid ${expanded ? "expanded" : ""} ${isMobile ? "mobile" : ""
          }`}
        style={{ gridTemplateColumns: `repeat(${Math.min(displayedApps.length, 5)}, 1fr)` }}
      >
        {displayedApps.map((app, index) => (
          <div
            key={app.id}
            key={`${app.id}-${app.order}`}
            draggable
            onDragStart={(e) => handleDragStart(e, index)}
            onDragOver={(e) => handleDragOver(e, index)}
            onDragEnd={handleDragEnd}
            onDrop={(e) => handleDrop(e, index)}
            className={`app-wrapper ${
              draggedIndex === index ? "dragging" : ""
            } ${dragOverIndex === index ? "drag-over" : ""}`}
            className={`app-wrapper ${draggedIndex === index ? "dragging" : ""
              } ${dragOverIndex === index ? "drag-over" : ""}`}
          >
            <AppDisplay app={app} />
            <div className="drag-handle">⋮⋮</div>
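The fetch('/order', ...) calls above send the new ordering as a JSON array of [id, index] pairs. On the process side that body deserializes naturally into the order_list iterated in the homepage hunks; a hedged sketch of the shape only (the handler name is illustrative):

    use std::collections::HashMap;

    fn apply_order(body: &[u8], persisted: &mut HashMap<String, u32>) -> anyhow::Result<()> {
        // The wire format is e.g. [["chess:chess:sys", 0], ["settings:settings:sys", 1]].
        let order_list: Vec<(String, u32)> = serde_json::from_slice(body)?;
        for (app_id, order) in &order_list {
            persisted.insert(app_id.clone(), *order);
        }
        Ok(())
    }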
@@ -14,10 +14,10 @@ const AppDisplay: React.FC<AppDisplayProps> = ({ app }) => {
      style={
        !app?.path
          ? {
              pointerEvents: "none",
              textDecoration: "none !important",
              filter: "grayscale(100%)",
            }
          ? {
            pointerEvents: "none",
            textDecoration: "none !important",
            filter: "grayscale(100%)",
          }
          : {}
      }
    >
@@ -1,82 +0,0 @@
import useHomepageStore, { HomepageApp } from "../store/homepageStore"
import usePersistentStore from "../store/persistentStore"
import AppDisplay from "./AppDisplay"
import { useEffect, useState } from "react"
import { DragDropContext, Draggable, DropResult, Droppable } from '@hello-pangea/dnd'

const AppsDock: React.FC = () => {
  const { apps } = useHomepageStore()
  const { appOrder, setAppOrder } = usePersistentStore()
  const [dockedApps, setDockedApps] = useState<HomepageApp[]>([])

  useEffect(() => {
    // Sort apps based on persisted order
    const orderedApps = apps.filter(app => app.favorite).sort((a, b) => {
      return appOrder.indexOf(a.id) - appOrder.indexOf(b.id);
    });
    setDockedApps(orderedApps);

    // Sync the order with the backend
    fetch('/order', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json'
      },
      credentials: 'include',
      body: JSON.stringify(orderedApps.map(app => [app.id, appOrder.indexOf(app.id)]))
    });
  }, [apps, appOrder])

  const onDragEnd = (result: DropResult) => {
    if (!result.destination) {
      return;
    }

    const reorderedApps = Array.from(dockedApps);
    const [reorderedItem] = reorderedApps.splice(result.source.index, 1);
    reorderedApps.splice(result.destination.index, 0, reorderedItem);

    const newAppOrder = reorderedApps.map(app => app.id);
    setAppOrder(newAppOrder);
    setDockedApps(reorderedApps);

    fetch('/order', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json'
      },
      credentials: 'include',
      body: JSON.stringify(reorderedApps.map((app, index) => [app.id, index]))
    });
  }

  return <DragDropContext onDragEnd={onDragEnd}>
    <Droppable droppableId="droppable" direction="horizontal">
      {(provided, _snapshot) => (
        <div
          ref={provided.innerRef}
          {...provided.droppableProps}
        >
          {dockedApps.map((app, index) => <Draggable
            key={app.id}
            draggableId={app.id}
            index={index}
          >
            {(provided, _snapshot) => (
              <div
                ref={provided.innerRef}
                {...provided.draggableProps}
                {...provided.dragHandleProps}
                className="docked-app"
              >
                <AppDisplay app={app} />
              </div>
            )}
          </Draggable>)}
        </div>
      )}
    </Droppable>
  </DragDropContext>
}

export default AppsDock
@@ -1,7 +1,6 @@
import useHomepageStore, { HomepageApp } from "../store/homepageStore"
import Widget from "./Widget"
import usePersistentStore from "../store/persistentStore"
import { DragDropContext, Draggable, DropResult, Droppable } from '@hello-pangea/dnd'
import { useEffect, useState } from "react"

const Widgets = () => {
@@ -9,6 +8,9 @@ const Widgets = () => {
  const { widgetSettings, widgetOrder, setWidgetOrder } = usePersistentStore();
  const [orderedWidgets, setOrderedWidgets] = useState<HomepageApp[]>([]);

  const [draggedIndex, setDraggedIndex] = useState<number | null>(null);
  const [dragOverIndex, setDragOverIndex] = useState<number | null>(null);

  useEffect(() => {
    const visibleWidgets = apps.filter((app) => app.widget && !widgetSettings[app.id]?.hide);
    const orderedVisibleWidgets = visibleWidgets.sort((a, b) => {
@@ -17,51 +19,59 @@ const Widgets = () => {
    setOrderedWidgets(orderedVisibleWidgets);
  }, [apps, widgetSettings, widgetOrder]);

  const onDragEnd = (result: DropResult) => {
    if (!result.destination) {
      return;
    }
  const handleDragStart = (e: React.DragEvent, index: number) => {
    e.dataTransfer.setData("text/plain", index.toString());
    setDraggedIndex(index);
  };

    const reorderedWidgets = Array.from(orderedWidgets);
    const [reorderedItem] = reorderedWidgets.splice(result.source.index, 1);
    reorderedWidgets.splice(result.destination.index, 0, reorderedItem);
  const handleDragOver = (e: React.DragEvent, index: number) => {
    e.preventDefault();
    setDragOverIndex(index);
  };

    const newWidgetOrder = reorderedWidgets.map(widget => widget.id);
  const handleDragEnd = () => {
    setDraggedIndex(null);
    setDragOverIndex(null);
  };

  const handleDrop = (e: React.DragEvent, dropIndex: number) => {
    e.preventDefault();
    const dragIndex = parseInt(e.dataTransfer.getData("text/plain"), 10);
    if (dragIndex === dropIndex) return;

    const newSortedWidgets = [...orderedWidgets];
    const [movedWidget] = newSortedWidgets.splice(dragIndex, 1);
    newSortedWidgets.splice(dropIndex, 0, movedWidget);

    const newWidgetOrder = newSortedWidgets.map((wid) => wid.id);
    setWidgetOrder(newWidgetOrder);
    setOrderedWidgets(reorderedWidgets);
  }
    handleDragEnd();
  };

  return (
    <DragDropContext onDragEnd={onDragEnd}>
      <Droppable droppableId="widgets">
        {(provided, _snapshot) => (
          <div
            id="widgets"
            ref={provided.innerRef}
            {...provided.droppableProps}
          >
            {orderedWidgets.map((app, index) => (
              <Draggable key={app.id} draggableId={app.id} index={index}>
                {(provided, _snapshot) => (
                  <div
                    ref={provided.innerRef}
                    {...provided.draggableProps}
                    {...provided.dragHandleProps}
                  >
                    <Widget
                      id={app.id}
                      label={app.label}
                      widget={app.widget!}
                    />
                  </div>
                )}
              </Draggable>
            ))}
            {provided.placeholder}
          </div>
        )}
      </Droppable>
    </DragDropContext>
    <div
      id="widgets"
    >
      {orderedWidgets.map((wid, index) => (
        <div
          key={wid.id}
          draggable
          onDragStart={(e) => handleDragStart(e, index)}
          onDragOver={(e) => handleDragOver(e, index)}
          onDragEnd={handleDragEnd}
          onDrop={(e) => handleDrop(e, index)}
          className={`widget-wrapper ${draggedIndex === index ? "dragging" : ""
            } ${dragOverIndex === index ? "drag-over" : ""}`}
        >
          <Widget
            id={wid.id}
            label={wid.label}
            widget={wid.widget!}
          />
          <div className="drag-handle">⋮⋮</div>
        </div>
      ))}
    </div>
  );
}

@@ -62,13 +62,6 @@ header button {
  margin-left: 4px;
}

#widgets-header {
  color: light-dark(var(--off-white), var(--tasteful-dark));
  border-color: light-dark(var(--off-white), var(--tasteful-dark));
  padding: 1em 0em 1em 0em;
  border-bottom: 1px solid light-dark(var(--tasteful-dark), var(--off-white));
}

@media (prefers-color-scheme: light) {
  .kino-bird {
    filter: invert(1);
@@ -77,19 +70,6 @@ header button {

@media (prefers-color-scheme: dark) {}

[data-rfd-droppable-context-id] {
  display: flex;
  flex-direction: row;
  flex-wrap: wrap;
  gap: 10px;
  justify-content: center;
  background-color: light-dark(#4f000055, var(--tasteful-dark));
  padding: 10px;
  border-radius: 10px;
  margin: 0 auto;
  margin-bottom: 10px;
}

.no-ui {
  position: absolute;
  bottom: 0;
@@ -111,6 +91,7 @@ header button {
}

#widgets {
  margin-top: 1em;
  width: 100%;
  display: flex;
  flex-direction: row;
@@ -178,7 +159,6 @@ footer {

.apps-grid {
  display: grid;
  grid-template-columns: repeat(5, 1fr);
  width: 100%;
  color: var(--off-white);
}
@@ -193,7 +173,7 @@ footer {
  border-radius: 0 0 1em 1em;
  border: 0.5px solid rgba(255, 255, 255, 0.2);
  padding: 1em;
  color: light-dark(var(--white), var(--tasteful-dark));
  color: var(--white);
}

@media (max-width: 1024px) {
@@ -302,10 +282,19 @@ footer {
  transition: transform 0.2s ease;
}

.widget-wrapper {
  position: relative;
  transition: transform 0.2s ease;
}

.app-wrapper:hover .drag-handle {
  opacity: 1;
}

.widget-wrapper:hover .drag-handle {
  opacity: 1;
}

.drag-handle {
  position: absolute;
  top: 5px;
@@ -313,6 +302,9 @@ footer {
  cursor: move;
  opacity: 0;
  transition: opacity 0.2s ease;
  color: var(--white);
  font-size: 1.2em;
  text-shadow: -1px 1px 0px #000;
}

.dragging {
@@ -46,13 +46,13 @@ function Homepage() {
      <KinodeBird />
      <h2>
        {new Date().getHours() < 4
          ? "Good evening"
          ? "Good evening" // midnight to 4am
          : new Date().getHours() < 12
            ? "Good morning"
            : new Date().getHours() < 18
              ? "Good afternoon"
              : "Good evening"}
        , {our}
            ? "Good morning" // 4am to 11am
            : new Date().getHours() < 18
              ? "Good afternoon" // 12pm to 5pm
              : "Good evening" // 5pm to midnight
        }, {our}
      </h2>
      <a
        href="https://github.com/kinode-dao/kinode/releases"
@@ -60,21 +60,18 @@ function Homepage() {
      >
        [kinode v{version}]
      </a>
      <a
        href="#"
        onClick={(e) => {
          e.preventDefault();
          setShowWidgetsSettings(true);
        }}
      >
        [⚙]
      </a>
    </header>

    <div id="widgets-container">
      <header id="widgets-header">
        <h2>Widgets</h2>
        <a
          href="#"
          onClick={(e) => {
            e.preventDefault();
            setShowWidgetsSettings(true);
          }}
        >
          [⚙]
        </a>
      </header>
      <Widgets />
    </div>
    <footer>
@@ -87,6 +87,7 @@ fn create_widget(posts: Vec<KinodeBlogPost>) -> String {
    scrollbar-color: transparent transparent;
    scrollbar-width: none;
    align-self: stretch;
    padding-bottom: 30px;
}}

.post {{
@@ -78,7 +78,9 @@ async fn main() {
    let password = matches.get_one::<String>("password");

    // logging mode is toggled at runtime by CTRL+L
    let is_logging = *matches.get_one::<bool>("logging").unwrap();
    let is_logging = !*matches.get_one::<bool>("logging-off").unwrap();
    let max_log_size = matches.get_one::<u64>("max-log-size");
    let number_log_files = matches.get_one::<u64>("number-log-files");

    // detached determines whether terminal is interactive
    let detached = *matches.get_one::<bool>("detached").unwrap();
@@ -427,6 +429,8 @@ async fn main() {
        detached,
        verbose_mode,
        is_logging,
        max_log_size.copied(),
        number_log_files.copied(),
    ) => {
        match quit {
            Ok(()) => {
@@ -653,7 +657,7 @@ fn build_command() -> Command {
        .value_parser(value_parser!(u8)),
    )
    .arg(
        arg!(-l --logging <IS_LOGGING> "Run in logging mode (toggled at runtime by CTRL+L): write all terminal output to .terminal_log file")
        arg!(-l --"logging-off" <IS_NOT_LOGGING> "Run in non-logging mode (toggled at runtime by CTRL+L): do not write all terminal output to file in .terminal_logs directory")
            .action(clap::ArgAction::SetTrue),
    )
    .arg(
@@ -666,7 +670,15 @@ fn build_command() -> Command {
            .action(clap::ArgAction::SetTrue),
    )
    .arg(arg!(--rpc <RPC> "Add a WebSockets RPC URL at boot"))
    .arg(arg!(--password <PASSWORD> "Node password (in double quotes)"));
    .arg(arg!(--password <PASSWORD> "Node password (in double quotes)"))
    .arg(
        arg!(--"max-log-size" <MAX_LOG_SIZE_BYTES> "Max size of all logs in bytes; setting to 0 -> no size limit (default 16MB)")
            .value_parser(value_parser!(u64)),
    )
    .arg(
        arg!(--"number-log-files" <NUMBER_LOG_FILES> "Number of logs to rotate (default 4)")
            .value_parser(value_parser!(u64)),
    );

#[cfg(feature = "simulation-mode")]
let app = app
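Net effect of the argument changes above: on-disk logging is now on by default and -l only turns it off, while the two new flags bound the rotating log directory. A condensed sketch of the wiring (variable and flag names as in the diff; the example invocations in the comments are illustrative, not taken from the commit):

    // Logging defaults to on; --logging-off (-l) disables it at boot.
    //   ./kinode <home_dir>                   -> log to .terminal_logs, 16 MB over 4 files
    //   ./kinode <home_dir> --logging-off     -> no on-disk logging
    //   ./kinode <home_dir> --max-log-size 0  -> log without any size limit
    let is_logging = !*matches.get_one::<bool>("logging-off").unwrap();
    let max_log_size = matches.get_one::<u64>("max-log-size").copied(); // None -> 16 MB default
    let number_log_files = matches.get_one::<u64>("number-log-files").copied(); // None -> 4 files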
@@ -13,7 +13,7 @@ use lib::types::core::{
};
use std::{
    fs::{read_to_string, OpenOptions},
    io::{BufWriter, Write},
    io::BufWriter,
};
use tokio::signal::unix::{signal, SignalKind};
use unicode_segmentation::UnicodeSegmentation;
@@ -22,8 +22,8 @@ pub mod utils;

struct State {
    pub stdout: std::io::Stdout,
    /// handle for writing to on-disk log (disabled by default, triggered by CTRL+L)
    pub log_writer: BufWriter<std::fs::File>,
    /// handle and settings for on-disk log (disabled by default, triggered by CTRL+L)
    pub logger: utils::Logger,
    /// in-memory searchable command history that persists itself on disk (default size: 1000)
    pub command_history: utils::CommandHistory,
    /// terminal window width, 0 is leftmost column
@@ -182,6 +182,8 @@ pub async fn terminal(
    is_detached: bool,
    verbose_mode: u8,
    is_logging: bool,
    max_log_size: Option<u64>,
    number_log_files: Option<u64>,
) -> anyhow::Result<()> {
    let (stdout, _maybe_raw_mode) = utils::splash(&our, version, is_detached)?;

@@ -214,20 +216,15 @@ pub async fn terminal(

    // if CTRL+L is used to turn on logging, all prints to terminal
    // will also be written with their full timestamp to the .terminal_log file.
    // logging mode is always off by default. TODO add a boot flag to change this.
    let log_path = std::fs::canonicalize(&home_directory_path)
        .expect("terminal: could not get path for .terminal_log file")
        .join(".terminal_log");
    let log_handle = OpenOptions::new()
        .append(true)
        .create(true)
        .open(&log_path)
        .expect("terminal: could not open/create .terminal_log");
    let log_writer = BufWriter::new(log_handle);
    // logging mode is always on by default
    let log_dir_path = std::fs::canonicalize(&home_directory_path)
        .expect("terminal: could not get path for .terminal_logs dir")
        .join(".terminal_logs");
    let logger = utils::Logger::new(log_dir_path, max_log_size, number_log_files);

    let mut state = State {
        stdout,
        log_writer,
        logger,
        command_history,
        win_cols,
        win_rows,
@@ -320,21 +317,16 @@ fn handle_printout(printout: Printout, state: &mut State) -> anyhow::Result<()>
    // lock here so that runtime can still use println! without freezing..
    // can lock before loop later if we want to reduce overhead
    let mut stdout = state.stdout.lock();
    let now = Local::now();
    // always write print to log if in logging mode
    if state.logging_mode {
        writeln!(
            state.log_writer,
            "[{}] {}",
            now.to_rfc2822(),
            printout.content
        )?;
        state.logger.write(&printout.content)?;
    }
    // skip writing print to terminal if it's of a greater
    // verbosity level than our current mode
    if printout.verbosity > state.verbose_mode {
        return Ok(());
    }
    let now = Local::now();
    execute!(
        stdout,
        // print goes immediately above the dedicated input line at bottom
@@ -2,12 +2,16 @@ use crossterm::terminal::{disable_raw_mode, enable_raw_mode};
use lib::types::core::Identity;
use std::{
    collections::VecDeque,
    fs::File,
    fs::{File, OpenOptions},
    io::{BufWriter, Stdout, Write},
    path::{Path, PathBuf},
};
use unicode_segmentation::UnicodeSegmentation;
use unicode_width::UnicodeWidthStr;

const DEFAULT_MAX_LOGS_BYTES: u64 = 16_000_000;
const DEFAULT_NUMBER_LOG_FILES: u64 = 4;

pub struct RawMode;
impl RawMode {
    fn new() -> std::io::Result<Self> {
@@ -331,3 +335,113 @@ pub fn truncate_in_place(
            .collect::<String>()
    }
}

pub struct Logger {
    pub log_dir_path: PathBuf,
    pub strategy: LoggerStrategy,
    log_writer: BufWriter<std::fs::File>,
}

pub enum LoggerStrategy {
    Rotating {
        max_log_dir_bytes: u64,
        number_log_files: u64,
    },
    Infinite,
}

impl LoggerStrategy {
    fn new(max_log_size: Option<u64>, number_log_files: Option<u64>) -> Self {
        let max_log_size = max_log_size.unwrap_or_else(|| DEFAULT_MAX_LOGS_BYTES);
        let number_log_files = number_log_files.unwrap_or_else(|| DEFAULT_NUMBER_LOG_FILES);
        if max_log_size == 0 {
            LoggerStrategy::Infinite
        } else {
            LoggerStrategy::Rotating {
                max_log_dir_bytes: max_log_size,
                number_log_files,
            }
        }
    }
}

impl Logger {
    pub fn new(
        log_dir_path: PathBuf,
        max_log_size: Option<u64>,
        number_log_files: Option<u64>,
    ) -> Self {
        let log_writer = make_log_writer(&log_dir_path).unwrap();
        Self {
            log_dir_path,
            log_writer,
            strategy: LoggerStrategy::new(max_log_size, number_log_files),
        }
    }

    pub fn write(&mut self, line: &str) -> anyhow::Result<()> {
        let now = chrono::Local::now();
        let line = &format!("[{}] {}", now.to_rfc2822(), line);
        match self.strategy {
            LoggerStrategy::Infinite => {}
            LoggerStrategy::Rotating {
                max_log_dir_bytes,
                number_log_files,
            } => {
                // check whether to rotate
                let line_bytes = line.len();
                let file_bytes = self.log_writer.get_ref().metadata()?.len() as usize;
                if line_bytes + file_bytes >= (max_log_dir_bytes / number_log_files) as usize {
                    // rotate
                    self.log_writer = make_log_writer(&self.log_dir_path)?;

                    // clean up oldest if necessary
                    remove_oldest_if_exceeds(&self.log_dir_path, number_log_files as usize)?;
                }
            }
        }

        writeln!(self.log_writer, "{}", line)?;

        Ok(())
    }
}

fn make_log_writer(log_dir_path: &Path) -> anyhow::Result<BufWriter<std::fs::File>> {
    if !log_dir_path.exists() {
        std::fs::create_dir(log_dir_path)?;
    }
    let now = chrono::Local::now();
    let log_name = format!("{}.log", now.format("%Y-%m-%d-%H:%M:%S"));
    let log_path = log_dir_path.join(log_name);
    let log_handle = OpenOptions::new()
        .append(true)
        .create(true)
        .open(&log_path)?;
    Ok(BufWriter::new(log_handle))
}

fn remove_oldest_if_exceeds<P: AsRef<Path>>(path: P, max_items: usize) -> anyhow::Result<()> {
    let mut entries = Vec::new();

    // Collect all entries and their modification times
    for entry in std::fs::read_dir(path)? {
        let entry = entry?;
        if let Ok(metadata) = entry.metadata() {
            if let Ok(modified) = metadata.modified() {
                entries.push((modified, entry.path()));
            }
        }
    }

    // If the number of entries exceeds the max_items, remove the oldest
    while entries.len() > max_items {
        // Sort entries by modification time (oldest first)
        entries.sort_by_key(|e| e.0);

        let (_, path) = entries.remove(0);
        std::fs::remove_file(&path)?;
    }

    Ok(())
}
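A small usage sketch of the Logger added above, under its defaults: with DEFAULT_MAX_LOGS_BYTES = 16_000_000 and DEFAULT_NUMBER_LOG_FILES = 4, a file rotates once it approaches 16_000_000 / 4 = 4_000_000 bytes, and the oldest file in the directory is removed once more than four exist. The directory path here is a placeholder; in the runtime it is <home_directory>/.terminal_logs:

    use std::path::PathBuf;

    fn demo_logging() -> anyhow::Result<()> {
        let log_dir = PathBuf::from("/tmp/kinode-home/.terminal_logs"); // placeholder

        // None, None -> 16 MB budget split across 4 rotated files.
        let mut logger = Logger::new(log_dir.clone(), None, None);
        logger.write("node booted")?; // stored as "[<RFC 2822 timestamp>] node booted"

        // Explicit limits: 32 MB across 8 files, i.e. rotate near 4 MB per file.
        let mut verbose_logger = Logger::new(log_dir, Some(32_000_000), Some(8));
        verbose_logger.write("verbose print")?;
        Ok(())
    }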
@@ -1,7 +1,7 @@
[package]
name = "lib"
authors = ["KinodeDAO"]
version = "0.9.4"
version = "0.9.5"
edition = "2021"
description = "A general-purpose sovereign cloud computing platform"
homepage = "https://kinode.org"
@@ -11,7 +11,7 @@ license = "Apache-2.0"
[lib]

[build-dependencies]
kit = { git = "https://github.com/kinode-dao/kit", tag = "v0.7.4" }
kit = { git = "https://github.com/kinode-dao/kit", tag = "v0.7.6" }
tokio = "1.28"

[dependencies]