Merge branch 'develop' into hf/add-docker-build-images

This commit is contained in:
hosted-fornet 2024-09-26 17:19:40 -07:00
commit 5061ea860e
27 changed files with 311 additions and 3678 deletions

Cargo.lock generated

@@ -2414,18 +2414,6 @@ dependencies = [
 "winapi",
 ]
-[[package]]
-name = "docs"
-version = "0.1.0"
-dependencies = [
-"anyhow",
-"kinode_process_lib 0.9.1",
-"process_macros",
-"serde",
-"serde_json",
-"wit-bindgen",
-]
 [[package]]
 name = "download"
 version = "0.1.0"
@@ -3651,7 +3639,7 @@ dependencies = [
 [[package]]
 name = "kinode"
-version = "0.9.4"
+version = "0.9.5"
 dependencies = [
 "aes-gcm",
 "alloy 0.2.1",
@@ -3712,7 +3700,7 @@ dependencies = [
 [[package]]
 name = "kinode_lib"
-version = "0.9.4"
+version = "0.9.5"
 dependencies = [
 "lib",
 ]
@@ -3764,8 +3752,8 @@ dependencies = [
 [[package]]
 name = "kit"
-version = "0.7.4"
-source = "git+https://github.com/kinode-dao/kit?tag=v0.7.4#a995659fd1a41501d893cacf032175f343796096"
+version = "0.7.6"
+source = "git+https://github.com/kinode-dao/kit?tag=v0.7.6#da6676d79dfdaf47c00ce1d24874fd52de44f717"
 dependencies = [
 "alloy 0.1.4",
 "alloy-sol-macro",
@@ -3835,7 +3823,7 @@ checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67"
 [[package]]
 name = "lib"
-version = "0.9.4"
+version = "0.9.5"
 dependencies = [
 "alloy 0.2.1",
 "kit",


@@ -1,7 +1,7 @@
 [package]
 name = "kinode_lib"
 authors = ["KinodeDAO"]
-version = "0.9.4"
+version = "0.9.5"
 edition = "2021"
 description = "A general-purpose sovereign cloud computing platform"
 homepage = "https://kinode.org"
@@ -17,7 +17,6 @@ members = [
 "kinode/packages/app_store/app_store", "kinode/packages/app_store/ft_worker",
 "kinode/packages/app_store/download", "kinode/packages/app_store/install", "kinode/packages/app_store/uninstall", "kinode/packages/app_store/downloads", "kinode/packages/app_store/chain",
 "kinode/packages/chess/chess",
-"kinode/packages/docs/docs",
 "kinode/packages/homepage/homepage",
 "kinode/packages/kino_updates/blog", "kinode/packages/kino_updates/globe",
 "kinode/packages/kns_indexer/kns_indexer", "kinode/packages/kns_indexer/get_block", "kinode/packages/kns_indexer/state",


@@ -120,7 +120,7 @@ The `sys` publisher is not a real node ID, but it's also not a special case value
 - CTRL+J to toggle debug mode
 - CTRL+S to step through events in debug mode
-- CTRL+L to toggle logging mode, which writes all terminal output to the `.terminal_log` file. Off by default, this will write all events and verbose prints with timestamps.
+- CTRL+L to toggle logging mode, which writes all terminal output to the `.terminal_log` file. On by default, this will write all events and verbose prints with timestamps.
 - CTRL+A to jump to beginning of input
 - CTRL+E to jump to end of input

entitlements.plist Normal file

@@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<!-- Required for running Kinode binary on macOS. Do I know why? Absolutely not.
Do not touch without testing after notarizing, changes break things unexpectedly.-->
<key>com.apple.security.cs.allow-jit</key>
<true/>
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
<true/>
</dict>
</plist>


@@ -1,7 +1,7 @@
 [package]
 name = "kinode"
 authors = ["KinodeDAO"]
-version = "0.9.4"
+version = "0.9.5"
 edition = "2021"
 description = "A general-purpose sovereign cloud computing platform"
 homepage = "https://kinode.org"
@@ -15,7 +15,7 @@ path = "src/main.rs"
 [build-dependencies]
 anyhow = "1.0.71"
 flate2 = "1.0"
-kit = { git = "https://github.com/kinode-dao/kit", tag = "v0.7.4" }
+kit = { git = "https://github.com/kinode-dao/kit", tag = "v0.7.6" }
 tar = "0.4"
 tokio = "1.28"
 walkdir = "2.4"


@@ -75,39 +75,6 @@ fn untar_gz_file(path: &Path, dest: &Path) -> std::io::Result<()> {
 Ok(())
 }
-/// fetch .tar.gz of kinode book for docs app
-fn get_kinode_book(packages_dir: &Path) -> anyhow::Result<()> {
-p!("fetching kinode book .tar.gz");
-let rt = tokio::runtime::Runtime::new().unwrap();
-rt.block_on(async {
-let releases = kit::boot_fake_node::fetch_releases("kinode-dao", "kinode-book")
-.await
-.map_err(|e| anyhow::anyhow!("{e:?}"))?;
-if releases.is_empty() {
-return Err(anyhow::anyhow!("couldn't retrieve kinode-book releases"));
-}
-let release = &releases[0];
-if release.assets.is_empty() {
-return Err(anyhow::anyhow!(
-"most recent kinode-book release has no assets"
-));
-}
-let release_url = format!(
-"https://github.com/kinode-dao/kinode-book/releases/download/{}/{}",
-release.tag_name, release.assets[0].name,
-);
-let book_dir = packages_dir.join("docs").join("pkg").join("ui");
-fs::create_dir_all(&book_dir)?;
-let book_tar_path = book_dir.join("book.tar.gz");
-kit::build::download_file(&release_url, &book_tar_path)
-.await
-.map_err(|e| anyhow::anyhow!("{e:?}"))?;
-untar_gz_file(&book_tar_path, &book_dir)?;
-fs::remove_file(book_tar_path)?;
-Ok(())
-})
-}
 fn build_and_zip_package(
 entry_path: PathBuf,
 parent_pkg_path: &str,
@@ -198,12 +165,6 @@ fn main() -> anyhow::Result<()> {
 }
 }
-if std::env::var("SKIP_BOOK").is_ok() {
-p!("skipping book build");
-} else {
-get_kinode_book(&packages_dir)?;
-}
 output_reruns(&packages_dir);
 let features = get_features();


@@ -88,6 +88,7 @@ fn make_widget() -> String {
 height: 100vh;
 width: 100vw;
 overflow-y: auto;
+padding-bottom: 30px;
 }
 .app {


@@ -1,8 +0,0 @@
*/target/
/target
pkg/*.wasm
*.swp
*.swo
*/wasi_snapshot_preview1.wasm
*/wit/
*/process_env

File diff suppressed because it is too large


@@ -1,10 +0,0 @@
[workspace]
resolver = "2"
members = [
"docs",
]
[profile.release]
panic = "abort"
opt-level = "s"
lto = true


@@ -1,18 +0,0 @@
[package]
name = "docs"
version = "0.1.0"
edition = "2021"
[dependencies]
anyhow = "1.0"
kinode_process_lib = "0.9.1"
process_macros = { git = "https://github.com/kinode-dao/process_macros", rev = "626e501" }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
wit-bindgen = "0.24.0"
[lib]
crate-type = ["cdylib"]
[package.metadata.component]
package = "kinode:process"

File diff suppressed because one or more lines are too long


@@ -1,94 +0,0 @@
use kinode_process_lib::{
await_message, call_init, homepage, http, println, vfs, Address, LazyLoadBlob,
};
wit_bindgen::generate!({
path: "target/wit",
world: "process-v0",
});
const ICON: &str = include_str!("icon");
call_init!(init);
fn init(our: Address) {
println!("begin");
let mut server = http::server::HttpServer::new(5);
// Serve the docs book dynamically from /docs:docs:sys/
server
.bind_http_path("/", http::server::HttpBindingConfig::default())
.expect("failed to bind /");
homepage::add_to_homepage("Docs", Some(ICON), Some("index.html"), None);
loop {
match await_message() {
Err(send_error) => println!("got SendError: {send_error}"),
Ok(ref message) => {
// handle http requests
// no need to validate source since capabilities limit to vfs/http_server
let Ok(request) = server.parse_request(message.body()) else {
continue;
};
server.handle_request(
request,
|incoming| {
// client frontend sent an HTTP request, process it and
// return an HTTP response
// these functions can reuse the logic from handle_local_request
// after converting the request into the appropriate format!
match incoming.method().unwrap_or_default() {
http::Method::GET => {
// serve the page they requested
match vfs::File::new(
format!(
"{}/pkg/ui{}",
our.package_id(),
incoming.path().unwrap_or_default()
),
5,
)
.read()
{
Ok(file) => {
let mime_type = format!(
"text/{}",
incoming
.path()
.unwrap_or_default()
.split('.')
.last()
.unwrap_or("plain")
);
(
http::server::HttpResponse::new(http::StatusCode::OK)
.header("Content-Type", mime_type),
Some(LazyLoadBlob::new(None::<String>, file)),
)
}
Err(e) => (
http::server::HttpResponse::new(
http::StatusCode::NOT_FOUND,
)
.header("Content-Type", "text/html"),
Some(LazyLoadBlob::new(None::<String>, e.to_string())),
),
}
}
_ => (
http::server::HttpResponse::new(
http::StatusCode::METHOD_NOT_ALLOWED,
),
None,
),
}
},
|_channel_id, _message_type, _message| {
// client frontend sent a websocket message, ignore
},
)
}
}
}
}


@@ -1,18 +0,0 @@
{
"name": "docs",
"description": "",
"image": "",
"properties": {
"package_name": "docs",
"current_version": "0.1.0",
"publisher": "sys",
"mirrors": [],
"code_hashes": {
"0.1.0": ""
},
"wit_version": 0,
"dependencies": []
},
"external_url": "",
"animation_url": ""
}


@@ -1,18 +0,0 @@
[
{
"process_name": "docs",
"process_wasm_path": "/docs.wasm",
"on_exit": "Restart",
"request_networking": false,
"request_capabilities": [
"homepage:homepage:sys",
"http_server:distro:sys",
"vfs:distro:sys"
],
"grant_capabilities": [
"http_server:distro:sys",
"vfs:distro:sys"
],
"public": false
}
]


@@ -1,10 +1,9 @@
-#![feature(let_chains)]
 use crate::kinode::process::homepage::{AddRequest, Request as HomepageRequest};
 use kinode_process_lib::{
 await_message, call_init, get_blob, http, http::server, println, Address, LazyLoadBlob,
 };
 use serde::{Deserialize, Serialize};
-use std::collections::BTreeMap;
+use std::collections::{BTreeMap, HashMap};
 /// Fetching OS version from main package
 const CARGO_TOML: &str = include_str!("../../../../Cargo.toml");
@@ -26,9 +25,11 @@ struct HomepageApp {
 base64_icon: Option<String>,
 widget: Option<String>,
 order: u32,
-favorite: bool,
+favorite: bool, // **not currently used on frontend**
 }
+type PersistedAppOrder = HashMap<String, u32>;
 wit_bindgen::generate!({
 path: "target/wit",
 world: "homepage-sys-v0",
@@ -162,6 +163,11 @@ fn init(our: Address) {
 .bind_http_path("/order", http_config)
 .expect("failed to bind /order");
+// load persisted app order
+let mut persisted_app_order =
+kinode_process_lib::get_typed_state(|bytes| serde_json::from_slice(bytes))
+.unwrap_or(PersistedAppOrder::new());
 loop {
 let Ok(ref message) = await_message() else {
 // we never send requests, so this will never happen
@@ -238,11 +244,15 @@ fn init(our: Address) {
 None,
 );
 };
-for (app_id, order) in order_list {
-if let Some(app) = app_data.get_mut(&app_id) {
-app.order = order;
+for (app_id, order) in &order_list {
+if let Some(app) = app_data.get_mut(app_id) {
+app.order = *order;
 }
 }
+persisted_app_order = order_list.into_iter().collect();
+kinode_process_lib::set_state(
+&serde_json::to_vec(&persisted_app_order).unwrap(),
+);
 (server::HttpResponse::new(http::StatusCode::OK), None)
 }
 _ => (server::HttpResponse::new(http::StatusCode::NOT_FOUND), None),
@@ -264,10 +274,11 @@ fn init(our: Address) {
 path,
 widget,
 }) => {
+let id = message.source().process.to_string();
 app_data.insert(
-message.source().process.to_string(),
+id.clone(),
 HomepageApp {
-id: message.source().process.to_string(),
+id: id.clone(),
 process: message.source().process().to_string(),
 package: message.source().package().to_string(),
 publisher: message.source().publisher().to_string(),
@@ -281,14 +292,20 @@ fn init(our: Address) {
 label,
 base64_icon: icon,
 widget,
-order: app_data.len() as u32,
+order: if let Some(order) = persisted_app_order.get(&id) {
+*order
+} else {
+app_data.len() as u32
+},
 favorite: DEFAULT_FAVES
 .contains(&message.source().process.to_string().as_str()),
 },
 );
 }
 HomepageRequest::Remove => {
-app_data.remove(&message.source().process.to_string());
+let id = message.source().process.to_string();
+app_data.remove(&id);
+persisted_app_order.remove(&id);
 }
 HomepageRequest::SetStylesheet(new_stylesheet_string) => {
 // ONLY settings:settings:sys may call this request
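
The `/order` handler and the `HomepageRequest::Add` arm above share one small persistence pattern: the app-id to order map is serialized with `serde_json` into node state via `set_state`, and reloaded at init with `get_typed_state`, falling back to an empty map. Below is a minimal, self-contained sketch of that round trip with a plain byte buffer standing in for node state; it assumes only `serde_json`, and the app ids are illustrative.

```rust
// Sketch of the persisted app-order round trip (assumption: serde_json only;
// a Vec<u8> stands in for the node state touched by set_state/get_typed_state).
use std::collections::HashMap;

type PersistedAppOrder = HashMap<String, u32>;

fn main() {
    // what the /order handler receives from the frontend: (app id, order) pairs
    let order_list: Vec<(String, u32)> = vec![
        ("settings:settings:sys".to_string(), 0),
        ("app_store:app_store:sys".to_string(), 1),
    ];

    // persist: collect into the map and serialize it (the set_state call above)
    let persisted: PersistedAppOrder = order_list.into_iter().collect();
    let saved_state: Vec<u8> = serde_json::to_vec(&persisted).unwrap();

    // reload on next init: decode or fall back to an empty map
    // (get_typed_state(...).unwrap_or(PersistedAppOrder::new()) above)
    let restored: PersistedAppOrder = serde_json::from_slice(&saved_state).unwrap_or_default();

    // an app seen before keeps its saved slot; an unknown app goes to the end,
    // mirroring the `order:` expression in the Add arm
    let position = |id: &str, current_len: usize| -> u32 {
        restored.get(id).copied().unwrap_or(current_len as u32)
    };
    assert_eq!(position("app_store:app_store:sys", 2), 1);
    assert_eq!(position("homepage:homepage:sys", 2), 2);
}
```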


@@ -1,14 +1,13 @@
 import React, { useState, useEffect, useMemo } from "react";
-import useHomepageStore from "../store/homepageStore";
-import usePersistentStore from "../store/persistentStore";
+import useHomepageStore, { HomepageApp } from "../store/homepageStore";
 import AppDisplay from "./AppDisplay";
 const AllApps: React.FC = () => {
 const { apps } = useHomepageStore();
-const { appOrder, setAppOrder } = usePersistentStore();
 const [expanded, setExpanded] = useState(false);
 const [isMobile, setIsMobile] = useState(false);
 const [visibleApps, setVisibleApps] = useState(5);
+const [orderedApps, setOrderedApps] = useState<HomepageApp[]>([]);
 const [draggedIndex, setDraggedIndex] = useState<number | null>(null);
 const [dragOverIndex, setDragOverIndex] = useState<number | null>(null);
@@ -26,13 +25,14 @@ const AllApps: React.FC = () => {
 // Sort apps based on persisted order
 const sortedApps = useMemo(() => {
-const orderedApps = [...apps].sort((a, b) => {
-return appOrder.indexOf(a.id) - appOrder.indexOf(b.id);
+if (!orderedApps.length) {
+setOrderedApps(apps);
+}
+const o = [...orderedApps].sort((a, b) => {
+return a.order - b.order;
 });
-// Ensure all apps are included in the order
-const missingApps = apps.filter((app) => !appOrder.includes(app.id));
-return [...orderedApps, ...missingApps];
-}, [apps, appOrder]);
+return o.filter(app => app.path !== null);
+}, [orderedApps, apps]);
 const displayedApps = expanded
 ? sortedApps
@@ -67,28 +67,42 @@ const AllApps: React.FC = () => {
 const [movedApp] = newSortedApps.splice(dragIndex, 1);
 newSortedApps.splice(dropIndex, 0, movedApp);
-const newAppOrder = newSortedApps.map((app) => app.id);
-setAppOrder(newAppOrder);
+const updatedApps = newSortedApps.map((app, index) => ({
+...app,
+order: index
+}));
+setOrderedApps(updatedApps);
+// Sync the order with the backend
+fetch('/order', {
+method: 'POST',
+headers: {
+'Content-Type': 'application/json'
+},
+credentials: 'include',
+body: JSON.stringify(newSortedApps.map((app, index) => [app.id, index]))
+});
 handleDragEnd();
 };
 return (
 <div id="all-apps" className={isMobile ? "mobile" : ""}>
 <div
-className={`apps-grid ${expanded ? "expanded" : ""} ${
-isMobile ? "mobile" : ""
+className={`apps-grid ${expanded ? "expanded" : ""} ${isMobile ? "mobile" : ""
 }`}
+style={{ gridTemplateColumns: `repeat(${Math.min(displayedApps.length, 5)}, 1fr)` }}
 >
 {displayedApps.map((app, index) => (
 <div
-key={app.id}
+key={`${app.id}-${app.order}`}
 draggable
 onDragStart={(e) => handleDragStart(e, index)}
 onDragOver={(e) => handleDragOver(e, index)}
 onDragEnd={handleDragEnd}
 onDrop={(e) => handleDrop(e, index)}
-className={`app-wrapper ${
-draggedIndex === index ? "dragging" : ""
+className={`app-wrapper ${draggedIndex === index ? "dragging" : ""
 } ${dragOverIndex === index ? "drag-over" : ""}`}
 >
 <AppDisplay app={app} />


@@ -1,82 +0,0 @@
import useHomepageStore, { HomepageApp } from "../store/homepageStore"
import usePersistentStore from "../store/persistentStore"
import AppDisplay from "./AppDisplay"
import { useEffect, useState } from "react"
import { DragDropContext, Draggable, DropResult, Droppable } from '@hello-pangea/dnd'
const AppsDock: React.FC = () => {
const { apps } = useHomepageStore()
const { appOrder, setAppOrder } = usePersistentStore()
const [dockedApps, setDockedApps] = useState<HomepageApp[]>([])
useEffect(() => {
// Sort apps based on persisted order
const orderedApps = apps.filter(app => app.favorite).sort((a, b) => {
return appOrder.indexOf(a.id) - appOrder.indexOf(b.id);
});
setDockedApps(orderedApps);
// Sync the order with the backend
fetch('/order', {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
credentials: 'include',
body: JSON.stringify(orderedApps.map(app => [app.id, appOrder.indexOf(app.id)]))
});
}, [apps, appOrder])
const onDragEnd = (result: DropResult) => {
if (!result.destination) {
return;
}
const reorderedApps = Array.from(dockedApps);
const [reorderedItem] = reorderedApps.splice(result.source.index, 1);
reorderedApps.splice(result.destination.index, 0, reorderedItem);
const newAppOrder = reorderedApps.map(app => app.id);
setAppOrder(newAppOrder);
setDockedApps(reorderedApps);
fetch('/order', {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
credentials: 'include',
body: JSON.stringify(reorderedApps.map((app, index) => [app.id, index]))
});
}
return <DragDropContext onDragEnd={onDragEnd}>
<Droppable droppableId="droppable" direction="horizontal">
{(provided, _snapshot) => (
<div
ref={provided.innerRef}
{...provided.droppableProps}
>
{dockedApps.map((app, index) => <Draggable
key={app.id}
draggableId={app.id}
index={index}
>
{(provided, _snapshot) => (
<div
ref={provided.innerRef}
{...provided.draggableProps}
{...provided.dragHandleProps}
className="docked-app"
>
<AppDisplay app={app} />
</div>
)}
</Draggable>)}
</div>
)}
</Droppable>
</DragDropContext>
}
export default AppsDock


@@ -1,7 +1,6 @@
 import useHomepageStore, { HomepageApp } from "../store/homepageStore"
 import Widget from "./Widget"
 import usePersistentStore from "../store/persistentStore"
-import { DragDropContext, Draggable, DropResult, Droppable } from '@hello-pangea/dnd'
 import { useEffect, useState } from "react"
 const Widgets = () => {
@@ -9,6 +8,9 @@ const Widgets = () => {
 const { widgetSettings, widgetOrder, setWidgetOrder } = usePersistentStore();
 const [orderedWidgets, setOrderedWidgets] = useState<HomepageApp[]>([]);
+const [draggedIndex, setDraggedIndex] = useState<number | null>(null);
+const [dragOverIndex, setDragOverIndex] = useState<number | null>(null);
 useEffect(() => {
 const visibleWidgets = apps.filter((app) => app.widget && !widgetSettings[app.id]?.hide);
 const orderedVisibleWidgets = visibleWidgets.sort((a, b) => {
@@ -17,51 +19,59 @@ const Widgets = () => {
 setOrderedWidgets(orderedVisibleWidgets);
 }, [apps, widgetSettings, widgetOrder]);
-const onDragEnd = (result: DropResult) => {
-if (!result.destination) {
-return;
-}
-const reorderedWidgets = Array.from(orderedWidgets);
-const [reorderedItem] = reorderedWidgets.splice(result.source.index, 1);
-reorderedWidgets.splice(result.destination.index, 0, reorderedItem);
-const newWidgetOrder = reorderedWidgets.map(widget => widget.id);
+const handleDragStart = (e: React.DragEvent, index: number) => {
+e.dataTransfer.setData("text/plain", index.toString());
+setDraggedIndex(index);
+};
+const handleDragOver = (e: React.DragEvent, index: number) => {
+e.preventDefault();
+setDragOverIndex(index);
+};
+const handleDragEnd = () => {
+setDraggedIndex(null);
+setDragOverIndex(null);
+};
+const handleDrop = (e: React.DragEvent, dropIndex: number) => {
+e.preventDefault();
+const dragIndex = parseInt(e.dataTransfer.getData("text/plain"), 10);
+if (dragIndex === dropIndex) return;
+const newSortedWidgets = [...orderedWidgets];
+const [movedWidget] = newSortedWidgets.splice(dragIndex, 1);
+newSortedWidgets.splice(dropIndex, 0, movedWidget);
+const newWidgetOrder = newSortedWidgets.map((wid) => wid.id);
 setWidgetOrder(newWidgetOrder);
-setOrderedWidgets(reorderedWidgets);
-}
+handleDragEnd();
+};
 return (
-<DragDropContext onDragEnd={onDragEnd}>
-<Droppable droppableId="widgets">
-{(provided, _snapshot) => (
 <div
 id="widgets"
-ref={provided.innerRef}
-{...provided.droppableProps}
 >
-{orderedWidgets.map((app, index) => (
-<Draggable key={app.id} draggableId={app.id} index={index}>
-{(provided, _snapshot) => (
+{orderedWidgets.map((wid, index) => (
 <div
-ref={provided.innerRef}
-{...provided.draggableProps}
-{...provided.dragHandleProps}
+key={wid.id}
+draggable
+onDragStart={(e) => handleDragStart(e, index)}
+onDragOver={(e) => handleDragOver(e, index)}
+onDragEnd={handleDragEnd}
+onDrop={(e) => handleDrop(e, index)}
+className={`widget-wrapper ${draggedIndex === index ? "dragging" : ""
+} ${dragOverIndex === index ? "drag-over" : ""}`}
 >
 <Widget
-id={app.id}
-label={app.label}
-widget={app.widget!}
+id={wid.id}
+label={wid.label}
+widget={wid.widget!}
 />
+<div className="drag-handle"></div>
 </div>
-)}
-</Draggable>
 ))}
-{provided.placeholder}
 </div>
-)}
-</Droppable>
-</DragDropContext>
 );
 }


@@ -62,13 +62,6 @@ header button {
 margin-left: 4px;
 }
-#widgets-header {
-color: light-dark(var(--off-white), var(--tasteful-dark));
-border-color: light-dark(var(--off-white), var(--tasteful-dark));
-padding: 1em 0em 1em 0em;
-border-bottom: 1px solid light-dark(var(--tasteful-dark), var(--off-white));
-}
 @media (prefers-color-scheme: light) {
 .kino-bird {
 filter: invert(1);
@@ -77,19 +70,6 @@ header button {
 @media (prefers-color-scheme: dark) {}
-[data-rfd-droppable-context-id] {
-display: flex;
-flex-direction: row;
-flex-wrap: wrap;
-gap: 10px;
-justify-content: center;
-background-color: light-dark(#4f000055, var(--tasteful-dark));
-padding: 10px;
-border-radius: 10px;
-margin: 0 auto;
-margin-bottom: 10px;
-}
 .no-ui {
 position: absolute;
 bottom: 0;
@@ -111,6 +91,7 @@ header button {
 }
 #widgets {
+margin-top: 1em;
 width: 100%;
 display: flex;
 flex-direction: row;
@@ -178,7 +159,6 @@ footer {
 .apps-grid {
 display: grid;
-grid-template-columns: repeat(5, 1fr);
 width: 100%;
 color: var(--off-white);
 }
@@ -193,7 +173,7 @@ footer {
 border-radius: 0 0 1em 1em;
 border: 0.5px solid rgba(255, 255, 255, 0.2);
 padding: 1em;
-color: light-dark(var(--white), var(--tasteful-dark));
+color: var(--white);
 }
 @media (max-width: 1024px) {
@@ -302,10 +282,19 @@ footer {
 transition: transform 0.2s ease;
 }
+.widget-wrapper {
+position: relative;
+transition: transform 0.2s ease;
+}
 .app-wrapper:hover .drag-handle {
 opacity: 1;
 }
+.widget-wrapper:hover .drag-handle {
+opacity: 1;
+}
 .drag-handle {
 position: absolute;
 top: 5px;
@@ -313,6 +302,9 @@ footer {
 cursor: move;
 opacity: 0;
 transition: opacity 0.2s ease;
+color: var(--white);
+font-size: 1.2em;
+text-shadow: -1px 1px 0px #000;
 }
 .dragging {


@@ -46,13 +46,13 @@ function Homepage() {
 <KinodeBird />
 <h2>
 {new Date().getHours() < 4
-? "Good evening"
+? "Good evening" // midnight to 4am
 : new Date().getHours() < 12
-? "Good morning"
+? "Good morning" // 4am to 11am
 : new Date().getHours() < 18
-? "Good afternoon"
-: "Good evening"}
-, {our}
+? "Good afternoon" // 12pm to 5pm
+: "Good evening" // 5pm to midnight
+}, {our}
 </h2>
 <a
 href="https://github.com/kinode-dao/kinode/releases"
@@ -60,11 +60,6 @@ function Homepage() {
 >
 [kinode v{version}]
 </a>
-</header>
-<div id="widgets-container">
-<header id="widgets-header">
-<h2>Widgets</h2>
 <a
 href="#"
 onClick={(e) => {
@@ -75,6 +70,8 @@ function Homepage() {
 []
 </a>
 </header>
+<div id="widgets-container">
 <Widgets />
 </div>
 <footer>


@@ -87,6 +87,7 @@ fn create_widget(posts: Vec<KinodeBlogPost>) -> String {
 scrollbar-color: transparent transparent;
 scrollbar-width: none;
 align-self: stretch;
+padding-bottom: 30px;
 }}
 .post {{


@@ -78,7 +78,9 @@ async fn main() {
 let password = matches.get_one::<String>("password");
 // logging mode is toggled at runtime by CTRL+L
-let is_logging = *matches.get_one::<bool>("logging").unwrap();
+let is_logging = !*matches.get_one::<bool>("logging-off").unwrap();
+let max_log_size = matches.get_one::<u64>("max-log-size");
+let number_log_files = matches.get_one::<u64>("number-log-files");
 // detached determines whether terminal is interactive
 let detached = *matches.get_one::<bool>("detached").unwrap();
@@ -427,6 +429,8 @@ async fn main() {
 detached,
 verbose_mode,
 is_logging,
+max_log_size.copied(),
+number_log_files.copied(),
 ) => {
 match quit {
 Ok(()) => {
@@ -653,7 +657,7 @@ fn build_command() -> Command {
 .value_parser(value_parser!(u8)),
 )
 .arg(
-arg!(-l --logging <IS_LOGGING> "Run in logging mode (toggled at runtime by CTRL+L): write all terminal output to .terminal_log file")
+arg!(-l --"logging-off" <IS_NOT_LOGGING> "Run in non-logging mode (toggled at runtime by CTRL+L): do not write all terminal output to file in .terminal_logs directory")
 .action(clap::ArgAction::SetTrue),
 )
 .arg(
@@ -666,7 +670,15 @@ fn build_command() -> Command {
 .action(clap::ArgAction::SetTrue),
 )
 .arg(arg!(--rpc <RPC> "Add a WebSockets RPC URL at boot"))
-.arg(arg!(--password <PASSWORD> "Node password (in double quotes)"));
+.arg(arg!(--password <PASSWORD> "Node password (in double quotes)"))
+.arg(
+arg!(--"max-log-size" <MAX_LOG_SIZE_BYTES> "Max size of all logs in bytes; setting to 0 -> no size limit (default 16MB)")
+.value_parser(value_parser!(u64)),
+)
+.arg(
+arg!(--"number-log-files" <NUMBER_LOG_FILES> "Number of logs to rotate (default 4)")
+.value_parser(value_parser!(u64)),
+);
 #[cfg(feature = "simulation-mode")]
 let app = app


@@ -13,7 +13,7 @@ use lib::types::core::{
 };
 use std::{
 fs::{read_to_string, OpenOptions},
-io::{BufWriter, Write},
+io::BufWriter,
 };
 use tokio::signal::unix::{signal, SignalKind};
 use unicode_segmentation::UnicodeSegmentation;
@@ -22,8 +22,8 @@ pub mod utils;
 struct State {
 pub stdout: std::io::Stdout,
-/// handle for writing to on-disk log (disabled by default, triggered by CTRL+L)
-pub log_writer: BufWriter<std::fs::File>,
+/// handle and settings for on-disk log (disabled by default, triggered by CTRL+L)
+pub logger: utils::Logger,
 /// in-memory searchable command history that persists itself on disk (default size: 1000)
 pub command_history: utils::CommandHistory,
 /// terminal window width, 0 is leftmost column
@@ -182,6 +182,8 @@ pub async fn terminal(
 is_detached: bool,
 verbose_mode: u8,
 is_logging: bool,
+max_log_size: Option<u64>,
+number_log_files: Option<u64>,
 ) -> anyhow::Result<()> {
 let (stdout, _maybe_raw_mode) = utils::splash(&our, version, is_detached)?;
@@ -214,20 +216,15 @@ pub async fn terminal(
 // if CTRL+L is used to turn on logging, all prints to terminal
 // will also be written with their full timestamp to the .terminal_log file.
-// logging mode is always off by default. TODO add a boot flag to change this.
-let log_path = std::fs::canonicalize(&home_directory_path)
-.expect("terminal: could not get path for .terminal_log file")
-.join(".terminal_log");
-let log_handle = OpenOptions::new()
-.append(true)
-.create(true)
-.open(&log_path)
-.expect("terminal: could not open/create .terminal_log");
-let log_writer = BufWriter::new(log_handle);
+// logging mode is always on by default
+let log_dir_path = std::fs::canonicalize(&home_directory_path)
+.expect("terminal: could not get path for .terminal_logs dir")
+.join(".terminal_logs");
+let logger = utils::Logger::new(log_dir_path, max_log_size, number_log_files);
 let mut state = State {
 stdout,
-log_writer,
+logger,
 command_history,
 win_cols,
 win_rows,
@@ -320,21 +317,16 @@ fn handle_printout(printout: Printout, state: &mut State) -> anyhow::Result<()>
 // lock here so that runtime can still use println! without freezing..
 // can lock before loop later if we want to reduce overhead
 let mut stdout = state.stdout.lock();
-let now = Local::now();
 // always write print to log if in logging mode
 if state.logging_mode {
-writeln!(
-state.log_writer,
-"[{}] {}",
-now.to_rfc2822(),
-printout.content
-)?;
+state.logger.write(&printout.content)?;
 }
 // skip writing print to terminal if it's of a greater
 // verbosity level than our current mode
 if printout.verbosity > state.verbose_mode {
 return Ok(());
 }
+let now = Local::now();
 execute!(
 stdout,
 // print goes immediately above the dedicated input line at bottom


@@ -2,12 +2,16 @@ use crossterm::terminal::{disable_raw_mode, enable_raw_mode};
 use lib::types::core::Identity;
 use std::{
 collections::VecDeque,
-fs::File,
+fs::{File, OpenOptions},
 io::{BufWriter, Stdout, Write},
+path::{Path, PathBuf},
 };
 use unicode_segmentation::UnicodeSegmentation;
 use unicode_width::UnicodeWidthStr;
+const DEFAULT_MAX_LOGS_BYTES: u64 = 16_000_000;
+const DEFAULT_NUMBER_LOG_FILES: u64 = 4;
 pub struct RawMode;
 impl RawMode {
 fn new() -> std::io::Result<Self> {
@@ -331,3 +335,113 @@ pub fn truncate_in_place(
 .collect::<String>()
 }
 }
pub struct Logger {
pub log_dir_path: PathBuf,
pub strategy: LoggerStrategy,
log_writer: BufWriter<std::fs::File>,
}
pub enum LoggerStrategy {
Rotating {
max_log_dir_bytes: u64,
number_log_files: u64,
},
Infinite,
}
impl LoggerStrategy {
fn new(max_log_size: Option<u64>, number_log_files: Option<u64>) -> Self {
let max_log_size = max_log_size.unwrap_or_else(|| DEFAULT_MAX_LOGS_BYTES);
let number_log_files = number_log_files.unwrap_or_else(|| DEFAULT_NUMBER_LOG_FILES);
if max_log_size == 0 {
LoggerStrategy::Infinite
} else {
LoggerStrategy::Rotating {
max_log_dir_bytes: max_log_size,
number_log_files,
}
}
}
}
impl Logger {
pub fn new(
log_dir_path: PathBuf,
max_log_size: Option<u64>,
number_log_files: Option<u64>,
) -> Self {
let log_writer = make_log_writer(&log_dir_path).unwrap();
Self {
log_dir_path,
log_writer,
strategy: LoggerStrategy::new(max_log_size, number_log_files),
}
}
pub fn write(&mut self, line: &str) -> anyhow::Result<()> {
let now = chrono::Local::now();
let line = &format!("[{}] {}", now.to_rfc2822(), line);
match self.strategy {
LoggerStrategy::Infinite => {}
LoggerStrategy::Rotating {
max_log_dir_bytes,
number_log_files,
} => {
// check whether to rotate
let line_bytes = line.len();
let file_bytes = self.log_writer.get_ref().metadata()?.len() as usize;
if line_bytes + file_bytes >= (max_log_dir_bytes / number_log_files) as usize {
// rotate
self.log_writer = make_log_writer(&self.log_dir_path)?;
// clean up oldest if necessary
remove_oldest_if_exceeds(&self.log_dir_path, number_log_files as usize)?;
}
}
}
writeln!(self.log_writer, "{}", line)?;
Ok(())
}
}
fn make_log_writer(log_dir_path: &Path) -> anyhow::Result<BufWriter<std::fs::File>> {
if !log_dir_path.exists() {
std::fs::create_dir(log_dir_path)?;
}
let now = chrono::Local::now();
let log_name = format!("{}.log", now.format("%Y-%m-%d-%H:%M:%S"));
let log_path = log_dir_path.join(log_name);
let log_handle = OpenOptions::new()
.append(true)
.create(true)
.open(&log_path)?;
Ok(BufWriter::new(log_handle))
}
fn remove_oldest_if_exceeds<P: AsRef<Path>>(path: P, max_items: usize) -> anyhow::Result<()> {
let mut entries = Vec::new();
// Collect all entries and their modification times
for entry in std::fs::read_dir(path)? {
let entry = entry?;
if let Ok(metadata) = entry.metadata() {
if let Ok(modified) = metadata.modified() {
entries.push((modified, entry.path()));
}
}
}
// If the number of entries exceeds the max_items, remove the oldest
while entries.len() > max_items {
// Sort entries by modification time (oldest first)
entries.sort_by_key(|e| e.0);
let (_, path) = entries.remove(0);
std::fs::remove_file(&path)?;
}
Ok(())
}
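
One detail of the rotating strategy above that is easy to miss: the per-file threshold is `max_log_dir_bytes / number_log_files`, so the defaults (16 MB across 4 files) start a new log file at roughly 4 MB, and passing `--max-log-size 0` selects the `Infinite` strategy that never rotates. A small sketch of just that check, mirroring the comparison in `Logger::write`; the byte counts are illustrative.

```rust
// Rotation check as written in Logger::write above: rotate once the next line
// would push the current file past max_log_dir_bytes / number_log_files.
const DEFAULT_MAX_LOGS_BYTES: u64 = 16_000_000;
const DEFAULT_NUMBER_LOG_FILES: u64 = 4;

fn should_rotate(
    line_bytes: usize,
    file_bytes: usize,
    max_log_dir_bytes: u64,
    number_log_files: u64,
) -> bool {
    line_bytes + file_bytes >= (max_log_dir_bytes / number_log_files) as usize
}

fn main() {
    // defaults: 16_000_000 / 4 = 4_000_000 bytes per file before rotating
    assert!(!should_rotate(120, 3_000_000, DEFAULT_MAX_LOGS_BYTES, DEFAULT_NUMBER_LOG_FILES));
    assert!(should_rotate(120, 3_999_900, DEFAULT_MAX_LOGS_BYTES, DEFAULT_NUMBER_LOG_FILES));
}
```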


@@ -1,7 +1,7 @@
 [package]
 name = "lib"
 authors = ["KinodeDAO"]
-version = "0.9.4"
+version = "0.9.5"
 edition = "2021"
 description = "A general-purpose sovereign cloud computing platform"
 homepage = "https://kinode.org"
@@ -11,7 +11,7 @@ license = "Apache-2.0"
 [lib]
 [build-dependencies]
-kit = { git = "https://github.com/kinode-dao/kit", tag = "v0.7.4" }
+kit = { git = "https://github.com/kinode-dao/kit", tag = "v0.7.6" }
 tokio = "1.28"
 [dependencies]