Merge pull request #449 from kinode-dao/release-candidate

Release candidate
Commit 4123bd184c by doria, 2024-07-17 18:29:10 +09:00, committed by GitHub.
GPG Key ID: B5690EEEBB952194 (no known key found for this signature in database)
9 changed files with 91 additions and 54 deletions

Cargo.lock (generated)

@@ -3212,7 +3212,7 @@ dependencies = [
[[package]]
name = "kinode"
-version = "0.8.3"
+version = "0.8.4"
dependencies = [
 "aes-gcm",
 "alloy",

@@ -3233,14 +3233,13 @@ dependencies = [
 "hmac",
 "http 1.1.0",
 "jwt",
- "kit",
+ "kit 0.6.7",
 "lazy_static",
 "lib",
 "nohash-hasher",
 "open",
 "public-ip",
 "rand 0.8.5",
- "rayon",
 "reqwest 0.12.5",
 "ring",
 "rmp-serde",

@@ -3269,7 +3268,7 @@ dependencies = [
[[package]]
name = "kinode_lib"
-version = "0.8.3"
+version = "0.8.4"
dependencies = [
 "lib",
]

@@ -3355,6 +3354,38 @@ dependencies = [
 "zip 0.6.6",
]
+[[package]]
+name = "kit"
+version = "0.6.7"
+source = "git+https://github.com/kinode-dao/kit?rev=4a8999f#4a8999f90b69381e94d11fb5aa1b62215a9db95b"
+dependencies = [
+ "anyhow",
+ "base64 0.21.7",
+ "clap",
+ "color-eyre",
+ "dirs 5.0.1",
+ "fs-err",
+ "git2",
+ "hex",
+ "kinode_process_lib 0.8.0 (git+https://github.com/kinode-dao/process_lib.git?rev=7eb3a04)",
+ "nix",
+ "regex",
+ "reqwest 0.11.27",
+ "semver 1.0.23",
+ "serde",
+ "serde_json",
+ "sha2",
+ "tokio",
+ "toml",
+ "tracing",
+ "tracing-appender",
+ "tracing-error",
+ "tracing-subscriber",
+ "walkdir",
+ "wit-bindgen",
+ "zip 0.6.6",
+]
[[package]]
name = "kns_indexer"
version = "0.3.0"

@@ -3391,10 +3422,10 @@ checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67"
[[package]]
name = "lib"
-version = "0.8.3"
+version = "0.8.4"
dependencies = [
 "alloy",
- "kit",
+ "kit 0.6.2",
 "lazy_static",
 "rand 0.8.5",
 "ring",


@@ -1,7 +1,7 @@
[package]
name = "kinode_lib"
authors = ["KinodeDAO"]
-version = "0.8.3"
+version = "0.8.4"
edition = "2021"
description = "A general-purpose sovereign cloud computing platform"
homepage = "https://kinode.org"


@@ -1,7 +1,7 @@
[package]
name = "kinode"
authors = ["KinodeDAO"]
-version = "0.8.3"
+version = "0.8.4"
edition = "2021"
description = "A general-purpose sovereign cloud computing platform"
homepage = "https://kinode.org"

@@ -14,8 +14,7 @@ path = "src/main.rs"
[build-dependencies]
anyhow = "1.0.71"
-kit = { git = "https://github.com/kinode-dao/kit", tag = "v0.6.2" }
-rayon = "1.8.1"
+kit = { git = "https://github.com/kinode-dao/kit", rev = "4a8999f" }
tokio = "1.28"
walkdir = "2.4"
zip = "0.6"


@@ -1,4 +1,3 @@
-use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use std::{
    collections::HashSet,
    fs::{self, File},
@@ -16,37 +15,30 @@ fn get_features() -> String {
                .to_lowercase()
                .replace("_", "-");
            features.push_str(&feature);
-            //println!("cargo:rustc-cfg=feature=\"{}\"", feature);
-            //println!("- {}", feature);
        }
    }
    features
}
fn output_reruns(dir: &Path, rerun_files: &HashSet<String>) {
-    if rerun_files.contains(dir.to_str().unwrap()) {
-        // Output for all files in the directory if the directory itself is specified in rerun_files
-        if let Ok(entries) = fs::read_dir(dir) {
-            for entry in entries.filter_map(|e| e.ok()) {
-                let path = entry.path();
-                println!("cargo:rerun-if-changed={}", path.display());
-            }
-        }
-    } else {
-        // Check files individually
-        if let Ok(entries) = fs::read_dir(dir) {
-            for entry in entries.filter_map(|e| e.ok()) {
-                let path = entry.path();
-                if path.is_dir() {
-                    // If the entry is a directory, recursively walk it
-                    output_reruns(&path, rerun_files);
-                } else if let Some(filename) = path.file_name().and_then(|n| n.to_str()) {
-                    // Check if the current file is in our list of interesting files
-                    if rerun_files.contains(filename) {
-                        // If so, print a `cargo:rerun-if-changed=PATH` line for it
-                        println!("cargo:rerun-if-changed={}", path.display());
-                    }
-                }
-            }
-        }
+    // Check files individually
+    if let Ok(entries) = fs::read_dir(dir) {
+        for entry in entries.filter_map(|e| e.ok()) {
+            let path = entry.path();
+            if let Some(filename) = path.file_name().and_then(|n| n.to_str()) {
+                // Check if the current file is in our list of interesting files
+                if filename == "ui" {
+                    continue;
+                }
+                if rerun_files.contains(filename) {
+                    // If so, print a `cargo::rerun-if-changed=PATH` line for it
+                    println!("cargo::rerun-if-changed={}", path.display());
+                    continue;
+                }
+            }
+            if path.is_dir() {
+                // If the entry is a directory not in rerun_files, recursively walk it
+                output_reruns(&path, rerun_files);
+            }
+        }
    }
}
@@ -59,13 +51,23 @@ fn build_and_zip_package(
) -> anyhow::Result<(String, String, Vec<u8>)> {
    let rt = tokio::runtime::Runtime::new().unwrap();
    rt.block_on(async {
-        kit::build::execute(&entry_path, true, false, true, features, None, None, true)
-            .await
-            .map_err(|e| anyhow::anyhow!("{:?}", e))?;
+        kit::build::execute(
+            &entry_path,
+            true,
+            false,
+            true,
+            features,
+            None,
+            None,
+            None,
+            true,
+        )
+        .await
+        .map_err(|e| anyhow::anyhow!("{:?}", e))?;

        let mut writer = Cursor::new(Vec::new());
        let options = FileOptions::default()
-            .compression_method(zip::CompressionMethod::Stored)
+            .compression_method(zip::CompressionMethod::Deflated)
            .unix_permissions(0o755);
        {
            let mut zip = zip::ZipWriter::new(&mut writer);
@@ -111,14 +113,14 @@ fn main() -> anyhow::Result<()> {
    let rerun_files: HashSet<String> = HashSet::from([
        "Cargo.lock".to_string(),
        "Cargo.toml".to_string(),
-        "src/".to_string(),
+        "src".to_string(),
    ]);
    output_reruns(&parent_dir, &rerun_files);

    let features = get_features();
    let results: Vec<anyhow::Result<(String, String, Vec<u8>)>> = entries
-        .par_iter()
+        .iter()
        .filter_map(|entry_path| {
            let parent_pkg_path = entry_path.join("pkg");
            if !parent_pkg_path.exists() {
@@ -160,7 +162,11 @@ fn main() -> anyhow::Result<()> {
    }
    writeln!(bootstrapped_processes, "];")?;
-    let bootstrapped_processes_path = pwd.join("src/bootstrapped_processes.rs");
+    let target_dir = pwd.join("../target");
+    if !target_dir.exists() {
+        fs::create_dir_all(&target_dir)?;
+    }
+    let bootstrapped_processes_path = target_dir.join("bootstrapped_processes.rs");
    fs::write(&bootstrapped_processes_path, bootstrapped_processes)?;

    Ok(())

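Taken together, the build.rs changes above drop the rayon parallelism, skip `ui` directories when emitting rerun directives, and move the generated `bootstrapped_processes.rs` out of `src/` into the shared `../target` directory, which is why the `include!` in the state module further down now points at `../../target/`. A minimal sketch of that write-then-include pattern, using a hypothetical generated file name (`generated_consts.rs`) rather than the project's real one:

// build.rs (sketch): write a generated source file into the workspace target dir.
use std::{fs, path::PathBuf};

fn main() -> std::io::Result<()> {
    let target_dir = PathBuf::from("../target");
    if !target_dir.exists() {
        fs::create_dir_all(&target_dir)?;
    }
    // Hypothetical generated content standing in for bootstrapped_processes.rs.
    fs::write(
        target_dir.join("generated_consts.rs"),
        "pub const GENERATED: &[u8] = b\"example\";\n",
    )?;
    Ok(())
}

The consuming crate then pulls the file in with `include!("../../target/generated_consts.rs");`, the same shape as the `include!` change shown below.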

@@ -76,7 +76,6 @@ pub fn fetch_state(our: Address, provider: eth::Provider) -> State {
pub fn app_store_filter(state: &State) -> eth::Filter {
    eth::Filter::new()
        .address(eth::Address::from_str(&state.contract_address).unwrap())
-        .from_block(state.last_saved_block)
        .events(EVENTS)
}

@@ -84,7 +83,10 @@ pub fn app_store_filter(state: &State) -> eth::Filter {
pub fn fetch_and_subscribe_logs(state: &mut State) {
    let filter = app_store_filter(state);
    // get past logs, subscribe to new ones.
-    for log in fetch_logs(&state.provider, &filter) {
+    for log in fetch_logs(
+        &state.provider,
+        &filter.clone().from_block(state.last_saved_block),
+    ) {
        if let Err(e) = state.ingest_contract_event(log, false) {
            println!("error ingesting log: {e:?}");
        };


@@ -6,8 +6,8 @@ wit_bindgen::generate!({
    world: "process-v0",
});

-/// 20 minutes
-const REFRESH_INTERVAL: u64 = 20 * 60 * 1000;
+/// 2 hours
+const REFRESH_INTERVAL: u64 = 120 * 60 * 1000;

#[derive(Serialize, Deserialize)]
struct KinodeBlogPost {


@@ -59,9 +59,9 @@ sol! {
    event RoutingUpdate(bytes32 indexed node, bytes32[] routers);
}

-fn subscribe_to_logs(eth_provider: &eth::Provider, from_block: u64, filter: eth::Filter) {
+fn subscribe_to_logs(eth_provider: &eth::Provider, filter: eth::Filter) {
    loop {
-        match eth_provider.subscribe(1, filter.clone().from_block(from_block)) {
+        match eth_provider.subscribe(1, filter.clone()) {
            Ok(()) => break,
            Err(_) => {
                println!("failed to subscribe to chain! trying again in 5s...");

@@ -126,7 +126,6 @@ fn init(our: Address) {
fn main(our: Address, mut state: State) -> anyhow::Result<()> {
    let filter = eth::Filter::new()
        .address(state.contract_address.parse::<eth::Address>().unwrap())
-        .from_block(state.block - 1)
        .to_block(eth::BlockNumberOrTag::Latest)
        .events(vec![
            "NodeRegistered(bytes32,bytes)",

@@ -147,11 +146,11 @@ fn main(our: Address, mut state: State) -> anyhow::Result<()> {
        state.chain_id
    );

-    subscribe_to_logs(&eth_provider, state.block - 1, filter.clone());
+    subscribe_to_logs(&eth_provider, filter.clone());

    // if block in state is < current_block, get logs from that part.
    loop {
-        match eth_provider.get_logs(&filter) {
+        match eth_provider.get_logs(&filter.clone().from_block(state.block - 1)) {
            Ok(logs) => {
                for log in logs {
                    match handle_log(&our, &mut state, &log) {

@@ -277,7 +276,7 @@ fn handle_eth_message(
        }
        Err(_e) => {
            println!("got eth subscription error");
-            subscribe_to_logs(&eth_provider, state.block - 1, filter.clone());
+            subscribe_to_logs(&eth_provider, filter.clone());
        }
    }

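Both the app_store and kns_indexer hunks above follow the same pattern: the stored filter no longer carries a `from_block`, and the block range is attached only when historical logs are fetched, so live subscriptions simply follow the chain tip. A rough sketch of that shape, reusing names from the diff (`eth::Provider`, `eth::Filter`, `subscribe_to_logs`) with a placeholder where the real code calls `handle_log`:

// Sketch only: backfill from a saved checkpoint, then follow new logs.
fn backfill_then_follow(
    eth_provider: &eth::Provider,
    base_filter: eth::Filter,
    last_saved_block: u64,
) {
    // The subscription uses the block-range-free filter and starts at the tip.
    subscribe_to_logs(eth_provider, base_filter.clone());

    // The range is applied only for the one-off historical catch-up.
    match eth_provider.get_logs(&base_filter.clone().from_block(last_saved_block)) {
        Ok(logs) => {
            for log in logs {
                // handle_log(&our, &mut state, &log) in the real code.
                let _ = log;
            }
        }
        Err(_) => println!("failed to fetch historical logs"),
    }
}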

@@ -14,7 +14,7 @@ use std::{
};
use tokio::{fs, io::AsyncWriteExt, sync::Mutex};

-include!("bootstrapped_processes.rs");
+include!("../../target/bootstrapped_processes.rs");

pub async fn load_state(
    our_name: String,


@@ -1,7 +1,7 @@
[package]
name = "lib"
authors = ["KinodeDAO"]
-version = "0.8.3"
+version = "0.8.4"
edition = "2021"
description = "A general-purpose sovereign cloud computing platform"
homepage = "https://kinode.org"