Mirror of https://github.com/uqbar-dao/nectar.git, synced 2024-11-23 03:44:04 +03:00

Merge branch 'release-candidate' into develop

commit 1b8c5eb447
.github/workflows/build_release.yml (vendored, 1 change)
@@ -7,7 +7,6 @@ on:
 jobs:
   deploy:
     runs-on: ubuntu-latest
     if: github.ref == 'refs/heads/main'
     timeout-minutes: 60

     steps:
Cargo.lock (generated, 37 changes)
@@ -3233,14 +3233,13 @@ dependencies = [
  "hmac",
  "http 1.1.0",
  "jwt",
- "kit",
+ "kit 0.6.7",
  "lazy_static",
  "lib",
  "nohash-hasher",
  "open",
  "public-ip",
  "rand 0.8.5",
- "rayon",
  "reqwest 0.12.5",
  "ring",
  "rmp-serde",
@@ -3377,6 +3376,38 @@ dependencies = [
  "zip 0.6.6",
 ]

+[[package]]
+name = "kit"
+version = "0.6.7"
+source = "git+https://github.com/kinode-dao/kit?rev=4a8999f#4a8999f90b69381e94d11fb5aa1b62215a9db95b"
+dependencies = [
+ "anyhow",
+ "base64 0.21.7",
+ "clap",
+ "color-eyre",
+ "dirs 5.0.1",
+ "fs-err",
+ "git2",
+ "hex",
+ "kinode_process_lib 0.8.0",
+ "nix",
+ "regex",
+ "reqwest 0.11.27",
+ "semver 1.0.23",
+ "serde",
+ "serde_json",
+ "sha2",
+ "tokio",
+ "toml",
+ "tracing",
+ "tracing-appender",
+ "tracing-error",
+ "tracing-subscriber",
+ "walkdir",
+ "wit-bindgen",
+ "zip 0.6.6",
+]
+
 [[package]]
 name = "kns_indexer"
 version = "0.2.0"
@@ -3416,7 +3447,7 @@ name = "lib"
 version = "0.9.0"
 dependencies = [
  "alloy",
- "kit",
+ "kit 0.6.2",
  "lazy_static",
  "rand 0.8.5",
  "ring",
@@ -14,8 +14,7 @@ path = "src/main.rs"

 [build-dependencies]
 anyhow = "1.0.71"
-kit = { git = "https://github.com/kinode-dao/kit", tag = "v0.6.2" }
-rayon = "1.8.1"
+kit = { git = "https://github.com/kinode-dao/kit", rev = "4a8999f" }
 tokio = "1.28"
 walkdir = "2.4"
 zip = "0.6"
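A note on the pin above (general Cargo behavior, not specific to this repo): a git dependency referenced by tag follows whatever commit the tag points at, so a re-pushed tag silently changes the build, while a rev names one immutable commit. Dropping rayon here also matches the lockfile and build-script hunks elsewhere in this diff. A hypothetical manifest fragment showing both styles:

[build-dependencies]
# floats with the tag; a force-pushed tag changes what you build
example = { git = "https://github.com/owner/example", tag = "v1.0.0" }
# pins one exact commit; Cargo.lock records the full commit hash
# example = { git = "https://github.com/owner/example", rev = "4a8999f" }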
@@ -1,4 +1,3 @@
-use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
 use std::{
     collections::HashSet,
     fs::{self, File},
@@ -16,37 +15,30 @@ fn get_features() -> String {
                 .to_lowercase()
                 .replace("_", "-");
             features.push_str(&feature);
-            //println!("cargo:rustc-cfg=feature=\"{}\"", feature);
-            //println!("- {}", feature);
         }
     }
     features
 }

 fn output_reruns(dir: &Path, rerun_files: &HashSet<String>) {
-    if rerun_files.contains(dir.to_str().unwrap()) {
-        // Output for all files in the directory if the directory itself is specified in rerun_files
-        if let Ok(entries) = fs::read_dir(dir) {
-            for entry in entries.filter_map(|e| e.ok()) {
-                let path = entry.path();
-                println!("cargo:rerun-if-changed={}", path.display());
-            }
-        }
-    } else {
-        // Check files individually
-        if let Ok(entries) = fs::read_dir(dir) {
-            for entry in entries.filter_map(|e| e.ok()) {
-                let path = entry.path();
-                if path.is_dir() {
-                    // If the entry is a directory, recursively walk it
-                    output_reruns(&path, rerun_files);
-                } else if let Some(filename) = path.file_name().and_then(|n| n.to_str()) {
-                    // Check if the current file is in our list of interesting files
-                    if rerun_files.contains(filename) {
-                        // If so, print a `cargo:rerun-if-changed=PATH` line for it
-                        println!("cargo:rerun-if-changed={}", path.display());
-                    }
-                }
-            }
+    // Check files individually
+    if let Ok(entries) = fs::read_dir(dir) {
+        for entry in entries.filter_map(|e| e.ok()) {
+            let path = entry.path();
+            if let Some(filename) = path.file_name().and_then(|n| n.to_str()) {
+                // Check if the current file is in our list of interesting files
+                if filename == "ui" {
+                    continue;
+                }
+                if rerun_files.contains(filename) {
+                    // If so, print a `cargo:rerun-if-changed=PATH` line for it
+                    println!("cargo::rerun-if-changed={}", path.display());
+                    continue;
+                }
+            }
+            if path.is_dir() {
+                // If the entry is a directory not in rerun_files, recursively walk it
+                output_reruns(&path, rerun_files);
+            }
         }
     }
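A side note on the directive emitted above: the new code prints the double-colon form cargo::rerun-if-changed, while the old code used cargo:rerun-if-changed. Both are build-script directives; the cargo:: spelling is the newer syntax accepted by recent Cargo releases, with cargo: as the legacy form. A minimal, hypothetical build.rs isolating the directive:

// build.rs: ask Cargo to re-run this script when inputs change
fn main() {
    // legacy single-colon prefix, understood by all Cargo versions
    println!("cargo:rerun-if-changed=Cargo.toml");
    // newer double-colon prefix (recent Cargo releases)
    println!("cargo::rerun-if-changed=src");
}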
@@ -59,13 +51,23 @@ fn build_and_zip_package(
 ) -> anyhow::Result<(String, String, Vec<u8>)> {
     let rt = tokio::runtime::Runtime::new().unwrap();
     rt.block_on(async {
-        kit::build::execute(&entry_path, true, false, true, features, None, None, true)
-            .await
-            .map_err(|e| anyhow::anyhow!("{:?}", e))?;
+        kit::build::execute(
+            &entry_path,
+            true,
+            false,
+            true,
+            features,
+            None,
+            None,
+            None,
+            true,
+        )
+        .await
+        .map_err(|e| anyhow::anyhow!("{:?}", e))?;

         let mut writer = Cursor::new(Vec::new());
         let options = FileOptions::default()
-            .compression_method(zip::CompressionMethod::Stored)
+            .compression_method(zip::CompressionMethod::Deflated)
             .unix_permissions(0o755);
         {
             let mut zip = zip::ZipWriter::new(&mut writer);
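The options change above switches the package archives from Stored (no compression) to Deflated. A standalone sketch of the builder, assuming the zip 0.6 API this diff uses:

use zip::write::FileOptions;

// Deflated compresses entry contents; Stored writes them byte-for-byte.
// unix_permissions(0o755) keeps the executable bit on unpacked files.
fn zip_options() -> FileOptions {
    FileOptions::default()
        .compression_method(zip::CompressionMethod::Deflated)
        .unix_permissions(0o755)
}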
@@ -111,14 +113,14 @@ fn main() -> anyhow::Result<()> {
     let rerun_files: HashSet<String> = HashSet::from([
         "Cargo.lock".to_string(),
         "Cargo.toml".to_string(),
-        "src/".to_string(),
+        "src".to_string(),
     ]);
     output_reruns(&parent_dir, &rerun_files);

     let features = get_features();

     let results: Vec<anyhow::Result<(String, String, Vec<u8>)>> = entries
-        .par_iter()
+        .iter()
         .filter_map(|entry_path| {
             let parent_pkg_path = entry_path.join("pkg");
             if !parent_pkg_path.exists() {
@@ -160,7 +162,11 @@ fn main() -> anyhow::Result<()> {
     }

     writeln!(bootstrapped_processes, "];")?;
-    let bootstrapped_processes_path = pwd.join("src/bootstrapped_processes.rs");
+    let target_dir = pwd.join("../target");
+    if !target_dir.exists() {
+        fs::create_dir_all(&target_dir)?;
+    }
+    let bootstrapped_processes_path = target_dir.join("bootstrapped_processes.rs");
     fs::write(&bootstrapped_processes_path, bootstrapped_processes)?;

     Ok(())
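This moves the generated bootstrapped_processes.rs out of src/ and into ../target, so generated code no longer dirties the source tree. The conventional variant of this pattern uses Cargo's OUT_DIR; a minimal sketch under that assumption (hypothetical file name generated.rs, not the repo's actual layout):

// build.rs
use std::{env, fs, path::PathBuf};

fn main() {
    let out = PathBuf::from(env::var("OUT_DIR").unwrap()).join("generated.rs");
    fs::write(&out, "pub const GREETING: &str = \"hello\";").unwrap();
}

// elsewhere in the crate:
// include!(concat!(env!("OUT_DIR"), "/generated.rs"));

Writing to a hard-coded ../target, as this diff does, avoids threading OUT_DIR through, at the cost of coupling the crate to its position in the workspace.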
@@ -80,7 +80,6 @@ pub fn app_store_filter(state: &State) -> eth::Filter {

     eth::Filter::new()
         .address(eth::Address::from_str(&state.contract_address).unwrap())
-        .from_block(state.last_saved_block)
         .events(EVENTS)
         .topic3(notes)
 }
@@ -89,7 +88,10 @@ pub fn app_store_filter(state: &State) -> eth::Filter {
 pub fn fetch_and_subscribe_logs(state: &mut State) {
     let filter = app_store_filter(state);
     // get past logs, subscribe to new ones.
-    for log in fetch_logs(&state.provider, &filter) {
+    for log in fetch_logs(
+        &state.provider,
+        &filter.clone().from_block(state.last_saved_block),
+    ) {
         if let Err(e) = state.ingest_contract_event(log, false) {
             println!("error ingesting log: {e:?}");
         };
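With .from_block removed from the shared filter (previous hunk) and applied per call here, each backfill re-anchors at state.last_saved_block while the base filter stays reusable for the live subscription. The clone-then-rebuild pattern in isolation, with a hypothetical Filter type standing in for the eth builder:

#[derive(Clone, Debug)]
struct Filter {
    from_block: u64,
}

impl Filter {
    fn from_block(mut self, block: u64) -> Self {
        self.from_block = block;
        self
    }
}

fn main() {
    let base = Filter { from_block: 0 };
    // each fetch re-anchors a clone at the last block we saved
    let fetch = base.clone().from_block(1_234_567);
    assert_eq!(fetch.from_block, 1_234_567);
    assert_eq!(base.from_block, 0); // the base filter is untouched
}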
@@ -6,8 +6,8 @@ wit_bindgen::generate!({
     world: "process-v0",
 });

-/// 20 minutes
-const REFRESH_INTERVAL: u64 = 20 * 60 * 1000;
+/// 2 hours
+const REFRESH_INTERVAL: u64 = 120 * 60 * 1000;

 #[derive(Serialize, Deserialize)]
 struct KinodeBlogPost {
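The constant is in milliseconds, so the two values work out as:

// old: 20 * 60 * 1000  = 1_200_000 ms = 20 minutes
// new: 120 * 60 * 1000 = 7_200_000 ms = 2 hours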
@@ -149,8 +149,13 @@ fn fetch_most_recent_blog_posts(n: usize) -> Vec<KinodeBlogPost> {
         60,
         vec![],
     ) {
-        Ok(response) => serde_json::from_slice::<Vec<KinodeBlogPost>>(response.body())
-            .expect("Invalid UTF-8 from kinode.org"),
+        Ok(response) => match serde_json::from_slice::<Vec<KinodeBlogPost>>(response.body()) {
+            Ok(posts) => posts,
+            Err(e) => {
+                println!("Failed to parse blog posts: {e:?}");
+                vec![]
+            }
+        },
         Err(e) => {
             println!("Failed to fetch blog posts: {e:?}");
             vec![]
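The rewrite above trades a panicking .expect() (whose "Invalid UTF-8" message was misleading anyway; serde_json is reporting a JSON parse failure) for a logged fallback to an empty list. The same pattern as a self-contained sketch with a hypothetical Post type:

use serde::Deserialize;

#[derive(Deserialize)]
struct Post {
    title: String,
}

fn parse_posts(body: &[u8]) -> Vec<Post> {
    match serde_json::from_slice::<Vec<Post>>(body) {
        Ok(posts) => posts,
        Err(e) => {
            // log and degrade gracefully instead of crashing the process
            println!("Failed to parse blog posts: {e:?}");
            vec![]
        }
    }
}

fn main() {
    assert!(parse_posts(b"not json").is_empty());
    assert_eq!(parse_posts(br#"[{"title":"hi"}]"#)[0].title, "hi");
}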
File diff suppressed because one or more lines are too long
@@ -546,12 +546,22 @@ async fn http_handler(
             .into_response());
     }
     if request_subdomain != subdomain {
+        let query_string = if !query_params.is_empty() {
+            let params: Vec<String> = query_params
+                .iter()
+                .map(|(key, value)| format!("{}={}", key, value))
+                .collect();
+            format!("?{}", params.join("&"))
+        } else {
+            String::new()
+        };
+
         return Ok(warp::http::Response::builder()
             .status(StatusCode::TEMPORARY_REDIRECT)
             .header(
                 "Location",
                 format!(
-                    "{}://{}.{}{}",
+                    "{}://{}.{}{}{}",
                     match headers.get("X-Forwarded-Proto") {
                         Some(proto) => proto.to_str().unwrap_or("http"),
                         None => "http",
@@ -559,6 +569,7 @@ async fn http_handler(
                     subdomain,
                     host,
                     original_path,
+                    query_string,
                 ),
             )
             .body(vec![])
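The added block fixes dropped query parameters on the subdomain redirect: the old Location format string had no slot for them, so the extra {} plus the rebuilt query_string carries them through. The rebuild logic in isolation (a plain slice of pairs standing in for the request's parameter map; values are passed through as-is, not re-encoded):

fn build_query_string(query_params: &[(String, String)]) -> String {
    if query_params.is_empty() {
        return String::new();
    }
    let params: Vec<String> = query_params
        .iter()
        .map(|(key, value)| format!("{key}={value}"))
        .collect();
    format!("?{}", params.join("&"))
}

fn main() {
    let params = vec![("page".to_string(), "2".to_string())];
    assert_eq!(build_query_string(&params), "?page=2");
    assert!(build_query_string(&[]).is_empty());
}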
@@ -584,28 +595,10 @@ async fn http_handler(
         &jwt_secret_bytes,
     ) {
         // redirect to login page so they can get an auth token
-        if original_path == "" {
-            return Ok(warp::http::Response::builder()
-                .status(StatusCode::OK)
-                .body(login_html.to_string())
-                .into_response());
-        } else {
-            return Ok(warp::http::Response::builder()
-                .status(StatusCode::TEMPORARY_REDIRECT)
-                .header(
-                    "Location",
-                    format!(
-                        "{}://{}",
-                        match headers.get("X-Forwarded-Proto") {
-                            Some(proto) => proto.to_str().unwrap_or("http"),
-                            None => "http",
-                        },
-                        host,
-                    ),
-                )
-                .body(vec![])
-                .into_response());
-        }
+        return Ok(warp::http::Response::builder()
+            .status(StatusCode::OK)
+            .body(login_html.to_string())
+            .into_response());
     }
 }
 }
@@ -705,6 +705,11 @@ pub async fn assign_routing(
         ));
     }

+    if !our.is_direct() {
+        // indirect node
+        return Ok(());
+    }
+
     if ip.is_ok() && (ws.is_ok() || tcp.is_ok()) {
         // direct node
         let mut ports = std::collections::BTreeMap::new();
@@ -14,7 +14,7 @@ use std::{
 };
 use tokio::{fs, io::AsyncWriteExt, sync::Mutex};

-include!("bootstrapped_processes.rs");
+include!("../../target/bootstrapped_processes.rs");

 pub async fn load_state(
     our_name: String,
@@ -449,9 +449,9 @@ pub async fn terminal(
                 )?;
             },
             //
-            // BACKSPACE or DELETE: delete a single character at cursor
+            // BACKSPACE: delete a single character at cursor
             //
-            KeyCode::Backspace | KeyCode::Delete => {
+            KeyCode::Backspace => {
                 if line_col == prompt_len {
                     continue;
                 }
@@ -482,6 +482,35 @@ pub async fn terminal(
                 )?;
             },
+            //
+            // DELETE: delete a single character at right of cursor
+            //
+            KeyCode::Delete => {
+                if line_col == current_line.len() {
+                    continue;
+                }
+                current_line.remove(line_col);
+                if search_mode {
+                    utils::execute_search(
+                        &our,
+                        &mut stdout,
+                        &current_line,
+                        prompt_len,
+                        (win_cols, win_rows),
+                        (line_col, cursor_col),
+                        &mut command_history,
+                        search_depth,
+                    )?;
+                    continue;
+                }
+                execute!(
+                    stdout,
+                    cursor::MoveTo(0, win_rows),
+                    terminal::Clear(ClearType::CurrentLine),
+                    Print(utils::truncate_in_place(&current_line, prompt_len, win_cols, (line_col, cursor_col))),
+                    cursor::MoveTo(cursor_col, win_rows),
+                )?;
+            }
             //
             // LEFT: move cursor one spot left
             //
             KeyCode::Left => {
@@ -592,7 +621,7 @@ pub async fn terminal(
             _ = sigalrm.recv() => return Err(anyhow::anyhow!("exiting due to SIGALRM")),
             _ = sighup.recv() => return Err(anyhow::anyhow!("exiting due to SIGHUP")),
             _ = sigint.recv() => return Err(anyhow::anyhow!("exiting due to SIGINT")),
-            _ = sigpipe.recv() => return Err(anyhow::anyhow!("exiting due to SIGPIPE")),
+            _ = sigpipe.recv() => continue, // IGNORE SIGPIPE!
             _ = sigquit.recv() => return Err(anyhow::anyhow!("exiting due to SIGQUIT")),
             _ = sigterm.recv() => return Err(anyhow::anyhow!("exiting due to SIGTERM")),
             _ = sigusr1.recv() => return Err(anyhow::anyhow!("exiting due to SIGUSR1")),
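Treating SIGPIPE as a shutdown signal was too aggressive: writing to a closed pipe (for example, when a reader of the node's output exits early) raises SIGPIPE in the normal course of things, so the arm now just continues the loop. A reduced sketch of that arm, assuming the tokio::signal::unix setup the surrounding code uses:

use tokio::signal::unix::{signal, SignalKind};

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let mut sigpipe = signal(SignalKind::pipe())?;
    let mut sigterm = signal(SignalKind::terminate())?;
    loop {
        tokio::select! {
            _ = sigpipe.recv() => continue, // IGNORE SIGPIPE!
            _ = sigterm.recv() => return Err(anyhow::anyhow!("exiting due to SIGTERM")),
        }
    }
}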
@@ -3,6 +3,7 @@ use ring::signature;
 use rusqlite::types::{FromSql, FromSqlError, ToSql, ValueRef};
 use serde::{Deserialize, Serialize};
 use std::collections::{BTreeMap, HashMap, HashSet};
+use std::hash::{Hash, Hasher};
 use thiserror::Error;

 lazy_static::lazy_static! {
@@ -470,7 +471,7 @@ pub enum Message {
     Response((Response, Option<Context>)),
 }

-#[derive(Clone, Debug, Hash, Serialize, Deserialize)]
+#[derive(Clone, Debug, Serialize, Deserialize)]
 pub struct Capability {
     pub issuer: Address,
     pub params: String,
@@ -488,6 +489,14 @@ impl PartialEq for Capability {
     }
 }

+impl Hash for Capability {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        self.issuer.hash(state);
+        let params: serde_json::Value = serde_json::from_str(&self.params).unwrap_or_default();
+        params.hash(state);
+    }
+}
+
 impl Capability {
     pub fn new<T, U>(issuer: T, params: U) -> Self
     where
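Why replace derive(Hash) with a manual impl: the derived version hashed params as a raw string, so two JSON payloads differing only in whitespace or key order hashed differently even when the manual PartialEq above (which presumably compares params semantically) considers them equal, breaking the Hash/Eq contract for HashMap and HashSet keys. Hashing the parsed serde_json::Value normalizes formatting. A sketch of the effect (assumes a serde_json version that implements Hash for Value):

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn params_hash(params: &str) -> u64 {
    let value: serde_json::Value = serde_json::from_str(params).unwrap_or_default();
    let mut hasher = DefaultHasher::new();
    value.hash(&mut hasher);
    hasher.finish()
}

fn main() {
    // same JSON, different formatting: identical hashes
    assert_eq!(
        params_hash(r#"{"messaging":true}"#),
        params_hash("{ \"messaging\" : true }"),
    );
    // unparseable params all collapse to Value::Null via unwrap_or_default()
    assert_eq!(params_hash("not json"), params_hash(""));
}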