Merge pull request #335 from kinode-dao/hf/wit-apis

wit apis
commit feced37955 by nick.kino, 2024-05-16 10:28:44 -07:00 (committed by GitHub)
46 changed files with 1018 additions and 738 deletions

.gitignore (vendored): 2 lines changed
View File

@ -10,8 +10,6 @@ wit/
*.swo
*.zip
/home
packages/**/pkg/*.wasm
packages/**/wit
*/**/node_modules
.env
kinode/src/bootstrapped_processes.rs

Cargo.lock (generated): 97 lines changed
View File

@ -3084,7 +3084,7 @@ dependencies = [
"hmac",
"http 1.1.0",
"jwt",
"kit 0.3.1 (git+https://github.com/kinode-dao/kit?rev=25b474a)",
"kit 0.4.1",
"lazy_static",
"lib",
"log",
@ -3131,7 +3131,7 @@ dependencies = [
[[package]]
name = "kinode_process_lib"
version = "0.6.0"
source = "git+https://github.com/kinode-dao/process_lib?rev=84b3d84#84b3d84c7c31185f15691a288f1b45dbffb18fe2"
source = "git+https://github.com/kinode-dao/process_lib.git?rev=84b3d84#84b3d84c7c31185f15691a288f1b45dbffb18fe2"
dependencies = [
"alloy-json-rpc 0.1.0 (git+https://github.com/alloy-rs/alloy?rev=6f8ebb4)",
"alloy-primitives 0.6.4",
@ -3149,6 +3149,28 @@ dependencies = [
"wit-bindgen",
]
[[package]]
name = "kinode_process_lib"
version = "0.7.0"
source = "git+https://github.com/kinode-dao/process_lib.git?rev=2aa3a1a#2aa3a1a22e8a88e46864d474d777422eb1f1b60b"
dependencies = [
"alloy-json-rpc 0.1.0 (git+https://github.com/alloy-rs/alloy?rev=cad7935)",
"alloy-primitives 0.7.0",
"alloy-rpc-types 0.1.0 (git+https://github.com/alloy-rs/alloy?rev=cad7935)",
"alloy-transport 0.1.0 (git+https://github.com/alloy-rs/alloy?rev=cad7935)",
"anyhow",
"bincode",
"http 1.1.0",
"mime_guess",
"rand 0.8.5",
"rmp-serde",
"serde",
"serde_json",
"thiserror",
"url",
"wit-bindgen",
]
[[package]]
name = "kinode_process_lib"
version = "0.7.1"
@ -3171,40 +3193,6 @@ dependencies = [
"wit-bindgen",
]
[[package]]
name = "kit"
version = "0.3.1"
source = "git+https://github.com/kinode-dao/kit?rev=25b474a#25b474abec180f6ae63b3f2c13f459a180b5be73"
dependencies = [
"anyhow",
"base64 0.21.7",
"clap",
"color-eyre",
"dirs 5.0.1",
"fs-err",
"futures-util",
"git2",
"hex",
"kinode_process_lib 0.6.0",
"nix",
"regex",
"reqwest 0.11.27",
"rmp-serde",
"semver 1.0.22",
"serde",
"serde_json",
"thiserror",
"tokio",
"tokio-tungstenite 0.20.1",
"toml",
"tracing",
"tracing-appender",
"tracing-error",
"tracing-subscriber",
"walkdir",
"zip 0.6.6",
]
[[package]]
name = "kit"
version = "0.3.1"
@ -3239,6 +3227,41 @@ dependencies = [
"zip 0.6.6",
]
[[package]]
name = "kit"
version = "0.4.1"
source = "git+https://github.com/kinode-dao/kit?rev=9ad80d2#9ad80d25db1c0fd137ef236f26e506e89470b0ad"
dependencies = [
"anyhow",
"base64 0.21.7",
"clap",
"color-eyre",
"dirs 5.0.1",
"fs-err",
"futures-util",
"git2",
"hex",
"kinode_process_lib 0.7.0",
"nix",
"regex",
"reqwest 0.11.27",
"rmp-serde",
"semver 1.0.22",
"serde",
"serde_json",
"sha2",
"thiserror",
"tokio",
"tokio-tungstenite 0.20.1",
"toml",
"tracing",
"tracing-appender",
"tracing-error",
"tracing-subscriber",
"walkdir",
"zip 0.6.6",
]
[[package]]
name = "kns_indexer"
version = "0.2.1"
@ -3279,7 +3302,7 @@ version = "0.7.4"
dependencies = [
"alloy-rpc-types 0.1.0 (git+https://github.com/alloy-rs/alloy?rev=6f8ebb4)",
"anyhow",
"kit 0.3.1 (git+https://github.com/kinode-dao/kit?rev=659f59e)",
"kit 0.3.1",
"lazy_static",
"rand 0.8.5",
"reqwest 0.12.4",

View File

@ -14,7 +14,7 @@ path = "src/main.rs"
[build-dependencies]
anyhow = "1.0.71"
kit = { git = "https://github.com/kinode-dao/kit", rev = "25b474a" }
kit = { git = "https://github.com/kinode-dao/kit", rev = "9ad80d2" }
rayon = "1.8.1"
sha2 = "0.10"
tokio = "1.28"

View File

@ -59,7 +59,7 @@ fn build_and_zip_package(
) -> anyhow::Result<(String, String, Vec<u8>)> {
let rt = tokio::runtime::Runtime::new().unwrap();
rt.block_on(async {
kit::build::execute(&entry_path, true, false, true, features)
kit::build::execute(&entry_path, true, false, true, features, None, None) // TODO
.await
.map_err(|e| anyhow::anyhow!("{:?}", e))?;

View File

@ -17,6 +17,14 @@ pub enum RemoteRequest {
package_id: PackageId,
desired_version_hash: Option<String>,
},
/// Request a package API from another node that we expect to
/// be mirroring it. If the remote node is mirroring the package,
/// it must respond with RemoteResponse::DownloadApproved,
/// at which point the requester can expect an FTWorkerRequest::Receive.
DownloadApi {
package_id: PackageId,
desired_version_hash: String,
},
}
/// The response expected from sending a [`RemoteRequest`].
@ -45,6 +53,7 @@ pub enum LocalRequest {
/// This is used for locally installing a package.
NewPackage {
package: PackageId,
metadata: kernel_types::Erc721Metadata,
/// Sets whether we will mirror this package for others
mirror: bool,
},
@ -93,6 +102,10 @@ pub enum LocalRequest {
/// This is an expensive operation! Throw away our state and rebuild from scratch.
/// Re-index the locally downloaded/installed packages AND the onchain data.
RebuildIndex,
/// List all apps we have APIs for.
ListApis,
/// Return the given API, if we have it.
GetApi(PackageId),
}
/// Local responses take this form.
@ -107,6 +120,8 @@ pub enum LocalResponse {
MirrorResponse(MirrorResponse),
AutoUpdateResponse(AutoUpdateResponse),
RebuiltIndex,
ListApisResponse { apis: Vec<PackageId> },
GetApiResponse(GetApiResponse), // API in blob (or None)
}
// TODO for all: expand these to elucidate why something failed
@ -147,3 +162,9 @@ pub enum AutoUpdateResponse {
Success,
Failure,
}
#[derive(Debug, Serialize, Deserialize)]
pub enum GetApiResponse {
Success,
Failure,
}
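
For illustration, another local process could exercise this new API surface roughly as follows. This is a minimal sketch, not code from this PR: the app store process id ("main", "app_store", "sys") and having these LocalRequest/LocalResponse types in the caller's scope are assumptions.

use kinode_process_lib::{get_blob, PackageId, Request};

// Sketch: ask the local app store for a package's API zip.
// LocalRequest::GetApi should answer with LocalResponse::GetApiResponse and,
// on success, attach the api.zip bytes as the response blob.
fn fetch_api(package_id: &PackageId) -> anyhow::Result<Vec<u8>> {
    let response = Request::to(("our", "main", "app_store", "sys")) // assumed process id
        .body(serde_json::to_vec(&LocalRequest::GetApi(package_id.clone()))?)
        .send_and_await_response(5)??;
    match serde_json::from_slice::<LocalResponse>(response.body())? {
        LocalResponse::GetApiResponse(GetApiResponse::Success) => get_blob()
            .map(|blob| blob.bytes)
            .ok_or_else(|| anyhow::anyhow!("GetApi succeeded but no blob was attached")),
        _ => Err(anyhow::anyhow!("no API downloaded for {package_id}")),
    }
}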

View File

@ -30,10 +30,18 @@ pub fn handle_http_request(
our: &Address,
state: &mut State,
eth_provider: &eth::Provider,
requested_apis: &mut HashMap<PackageId, RequestedPackage>,
requested_packages: &mut HashMap<PackageId, RequestedPackage>,
req: &IncomingHttpRequest,
) -> anyhow::Result<()> {
match serve_paths(our, state, eth_provider, requested_packages, req) {
match serve_paths(
our,
state,
eth_provider,
requested_apis,
requested_packages,
req,
) {
Ok((status_code, _headers, body)) => send_response(
status_code,
Some(HashMap::from([(
@ -102,6 +110,7 @@ fn serve_paths(
our: &Address,
state: &mut State,
eth_provider: &eth::Provider,
requested_apis: &mut HashMap<PackageId, RequestedPackage>,
requested_packages: &mut HashMap<PackageId, RequestedPackage>,
req: &IncomingHttpRequest,
) -> anyhow::Result<(StatusCode, Option<HashMap<String, String>>, Vec<u8>)> {
@ -432,7 +441,7 @@ fn serve_paths(
format!("Invalid method {method} for {bound_path}").into_bytes(),
));
}
crate::rebuild_index(our, state, eth_provider);
crate::rebuild_index(our, state, eth_provider, requested_apis);
Ok((StatusCode::OK, None, vec![]))
}
_ => Ok((

View File

@ -9,7 +9,7 @@ use std::collections::{HashMap, HashSet};
use std::str::FromStr;
wit_bindgen::generate!({
path: "wit",
path: "target/wit",
world: "process",
});
@ -244,6 +244,7 @@ fn init(our: Address) {
// can change, log requests can take quite a long time.
let eth_provider = eth::Provider::new(CHAIN_ID, 60);
let mut requested_apis: HashMap<PackageId, RequestedPackage> = HashMap::new();
let mut requested_packages: HashMap<PackageId, RequestedPackage> = HashMap::new();
// get past logs, subscribe to new ones.
@ -254,7 +255,7 @@ fn init(our: Address) {
.events(EVENTS);
for log in fetch_logs(&eth_provider, &filter) {
if let Err(e) = state.ingest_listings_contract_event(&our, log) {
if let Err(e) = state.ingest_listings_contract_event(&our, log, &mut requested_apis) {
println!("error ingesting log: {e:?}");
};
}
@ -274,6 +275,7 @@ fn init(our: Address) {
&our,
&mut state,
&eth_provider,
&mut requested_apis,
&mut requested_packages,
&message,
) {
@ -304,9 +306,10 @@ fn init(our: Address) {
/// finally, fire a response if expected from a request.
fn handle_message(
our: &Address,
mut state: &mut State,
state: &mut State,
eth_provider: &eth::Provider,
mut requested_packages: &mut HashMap<PackageId, RequestedPackage>,
requested_apis: &mut HashMap<PackageId, RequestedPackage>,
requested_packages: &mut HashMap<PackageId, RequestedPackage>,
message: &Message,
) -> anyhow::Result<()> {
match message {
@ -320,25 +323,32 @@ fn handle_message(
if our.node != source.node {
return Err(anyhow::anyhow!("local request from non-local node"));
}
let resp = handle_local_request(
let (body, blob) = handle_local_request(
&our,
&local_request,
&mut state,
state,
eth_provider,
&mut requested_packages,
requested_apis,
requested_packages,
);
if expects_response.is_some() {
Response::new().body(serde_json::to_vec(&resp)?).send()?;
let response = Response::new().body(serde_json::to_vec(&body)?);
let response = if let Some(blob) = blob {
response.blob(blob)
} else {
response
};
response.send()?;
}
}
Req::RemoteRequest(remote_request) => {
let resp = handle_remote_request(&our, &source, &remote_request, &mut state);
let resp = handle_remote_request(&our, &source, &remote_request, state);
if expects_response.is_some() {
Response::new().body(serde_json::to_vec(&resp)?).send()?;
}
}
Req::FTWorkerResult(FTWorkerResult::ReceiveSuccess(name)) => {
handle_receive_download(&our, &mut state, &name, &mut requested_packages)?;
handle_receive_download(&our, state, &name, requested_apis, requested_packages)?;
}
Req::FTWorkerCommand(_) => {
spawn_receive_transfer(&our, &body)?;
@ -351,7 +361,7 @@ fn handle_message(
return Err(anyhow::anyhow!("eth sub event from weird addr: {source}"));
}
if let Ok(eth::EthSub { result, .. }) = eth_result {
handle_eth_sub_event(our, &mut state, result)?;
handle_eth_sub_event(our, state, result, requested_apis)?;
} else {
println!("got eth subscription error");
// attempt to resubscribe
@ -374,8 +384,9 @@ fn handle_message(
if let HttpServerRequest::Http(req) = incoming {
http_api::handle_http_request(
our,
&mut state,
state,
eth_provider,
requested_apis,
requested_packages,
&req,
)?;
@ -450,6 +461,53 @@ fn handle_remote_request(
)),
}
}
RemoteRequest::DownloadApi {
package_id,
desired_version_hash,
} => {
let Some(package_state) = state.get_downloaded_package(package_id) else {
return Resp::RemoteResponse(RemoteResponse::DownloadDenied(
ReasonDenied::NoPackage,
));
};
if !package_state.mirroring {
return Resp::RemoteResponse(RemoteResponse::DownloadDenied(
ReasonDenied::NotMirroring,
));
}
if &package_state.our_version != desired_version_hash {
return Resp::RemoteResponse(RemoteResponse::DownloadDenied(
ReasonDenied::HashMismatch {
requested: desired_version_hash.clone(),
have: package_state.our_version.clone(),
},
));
}
let file_name = format!("/{}-api-v0.zip", package_id); // TODO: actual version
// get the .zip from VFS and attach as blob to response
let file_path = format!("/{}/pkg/api.zip", package_id);
let Ok(Ok(_)) = Request::to(("our", "vfs", "distro", "sys"))
.body(
serde_json::to_vec(&vfs::VfsRequest {
path: file_path,
action: vfs::VfsAction::Read,
})
.unwrap(),
)
.send_and_await_response(5)
else {
return Resp::RemoteResponse(RemoteResponse::DownloadDenied(
ReasonDenied::FileNotFound,
));
};
// transfer will *inherit* the blob bytes we receive from VFS
match spawn_transfer(&our, &file_name, None, 60, &source) {
Ok(()) => Resp::RemoteResponse(RemoteResponse::DownloadApproved),
Err(_e) => Resp::RemoteResponse(RemoteResponse::DownloadDenied(
ReasonDenied::WorkerSpawnFailed,
)),
}
}
}
}
@ -459,12 +517,20 @@ fn handle_local_request(
request: &LocalRequest,
state: &mut State,
eth_provider: &eth::Provider,
requested_apis: &mut HashMap<PackageId, RequestedPackage>,
requested_packages: &mut HashMap<PackageId, RequestedPackage>,
) -> LocalResponse {
) -> (LocalResponse, Option<LazyLoadBlob>) {
match request {
LocalRequest::NewPackage { package, mirror } => {
LocalRequest::NewPackage {
package,
metadata,
mirror,
} => {
let Some(blob) = get_blob() else {
return LocalResponse::NewPackageResponse(NewPackageResponse::Failure);
return (
LocalResponse::NewPackageResponse(NewPackageResponse::Failure),
None,
);
};
// set the version hash for this new local package
let our_version = generate_version_hash(&blob.bytes);
@ -478,13 +544,35 @@ fn handle_local_request(
manifest_hash: None, // generated in the add fn
mirroring: *mirror,
auto_update: false, // can't auto-update a local package
metadata: None, // TODO
metadata: Some(metadata.clone()),
};
let Ok(()) = state.add_downloaded_package(package, package_state, Some(blob.bytes))
else {
return LocalResponse::NewPackageResponse(NewPackageResponse::Failure);
return (
LocalResponse::NewPackageResponse(NewPackageResponse::Failure),
None,
);
};
LocalResponse::NewPackageResponse(NewPackageResponse::Success)
let drive_path = format!("/{package}/pkg");
let result = Request::new()
.target(("our", "vfs", "distro", "sys"))
.body(
serde_json::to_vec(&vfs::VfsRequest {
path: format!("{}/api", drive_path),
action: vfs::VfsAction::Metadata,
})
.unwrap(),
)
.send_and_await_response(5);
if let Ok(Ok(_)) = result {
state.downloaded_apis.insert(package.to_owned());
};
(
LocalResponse::NewPackageResponse(NewPackageResponse::Success),
None,
)
}
LocalRequest::Download {
package: package_id,
@ -492,40 +580,102 @@ fn handle_local_request(
mirror,
auto_update,
desired_version_hash,
} => LocalResponse::DownloadResponse(start_download(
our,
requested_packages,
package_id,
download_from,
*mirror,
*auto_update,
desired_version_hash,
)),
LocalRequest::Install(package) => match handle_install(our, state, package) {
Ok(()) => LocalResponse::InstallResponse(InstallResponse::Success),
Err(_) => LocalResponse::InstallResponse(InstallResponse::Failure),
},
LocalRequest::Uninstall(package) => match state.uninstall(package) {
Ok(()) => LocalResponse::UninstallResponse(UninstallResponse::Success),
Err(_) => LocalResponse::UninstallResponse(UninstallResponse::Failure),
},
LocalRequest::StartMirroring(package) => match state.start_mirroring(package) {
true => LocalResponse::MirrorResponse(MirrorResponse::Success),
false => LocalResponse::MirrorResponse(MirrorResponse::Failure),
},
LocalRequest::StopMirroring(package) => match state.stop_mirroring(package) {
true => LocalResponse::MirrorResponse(MirrorResponse::Success),
false => LocalResponse::MirrorResponse(MirrorResponse::Failure),
},
LocalRequest::StartAutoUpdate(package) => match state.start_auto_update(package) {
true => LocalResponse::AutoUpdateResponse(AutoUpdateResponse::Success),
false => LocalResponse::AutoUpdateResponse(AutoUpdateResponse::Failure),
},
LocalRequest::StopAutoUpdate(package) => match state.stop_auto_update(package) {
true => LocalResponse::AutoUpdateResponse(AutoUpdateResponse::Success),
false => LocalResponse::AutoUpdateResponse(AutoUpdateResponse::Failure),
},
LocalRequest::RebuildIndex => rebuild_index(our, state, eth_provider),
} => (
LocalResponse::DownloadResponse(start_download(
our,
requested_packages,
package_id,
download_from,
*mirror,
*auto_update,
desired_version_hash,
)),
None,
),
LocalRequest::Install(package) => (
match handle_install(our, state, package) {
Ok(()) => LocalResponse::InstallResponse(InstallResponse::Success),
Err(_) => LocalResponse::InstallResponse(InstallResponse::Failure),
},
None,
),
LocalRequest::Uninstall(package) => (
match state.uninstall(package) {
Ok(()) => LocalResponse::UninstallResponse(UninstallResponse::Success),
Err(_) => LocalResponse::UninstallResponse(UninstallResponse::Failure),
},
None,
),
LocalRequest::StartMirroring(package) => (
match state.start_mirroring(package) {
true => LocalResponse::MirrorResponse(MirrorResponse::Success),
false => LocalResponse::MirrorResponse(MirrorResponse::Failure),
},
None,
),
LocalRequest::StopMirroring(package) => (
match state.stop_mirroring(package) {
true => LocalResponse::MirrorResponse(MirrorResponse::Success),
false => LocalResponse::MirrorResponse(MirrorResponse::Failure),
},
None,
),
LocalRequest::StartAutoUpdate(package) => (
match state.start_auto_update(package) {
true => LocalResponse::AutoUpdateResponse(AutoUpdateResponse::Success),
false => LocalResponse::AutoUpdateResponse(AutoUpdateResponse::Failure),
},
None,
),
LocalRequest::StopAutoUpdate(package) => (
match state.stop_auto_update(package) {
true => LocalResponse::AutoUpdateResponse(AutoUpdateResponse::Success),
false => LocalResponse::AutoUpdateResponse(AutoUpdateResponse::Failure),
},
None,
),
LocalRequest::RebuildIndex => (
rebuild_index(our, state, eth_provider, requested_apis),
None,
),
LocalRequest::ListApis => (list_apis(state), None),
LocalRequest::GetApi(ref package_id) => get_api(package_id, state),
}
}
pub fn get_api(package_id: &PackageId, state: &mut State) -> (LocalResponse, Option<LazyLoadBlob>) {
let (response, blob) = if !state.downloaded_apis.contains(package_id) {
(GetApiResponse::Failure, None)
} else {
let drive_path = format!("/{package_id}/pkg");
let result = Request::new()
.target(("our", "vfs", "distro", "sys"))
.body(
serde_json::to_vec(&vfs::VfsRequest {
path: format!("{}/api.zip", drive_path),
action: vfs::VfsAction::Read,
})
.unwrap(),
)
.send_and_await_response(5);
let Ok(Ok(_)) = result else {
return (LocalResponse::GetApiResponse(GetApiResponse::Failure), None);
};
let Some(blob) = get_blob() else {
return (LocalResponse::GetApiResponse(GetApiResponse::Failure), None);
};
let blob = LazyLoadBlob {
mime: Some("application/json".to_string()),
bytes: blob.bytes,
};
(GetApiResponse::Success, Some(blob))
};
(LocalResponse::GetApiResponse(response), blob)
}
pub fn list_apis(state: &mut State) -> LocalResponse {
LocalResponse::ListApisResponse {
apis: state.downloaded_apis.iter().cloned().collect(),
}
}
@ -533,6 +683,7 @@ pub fn rebuild_index(
our: &Address,
state: &mut State,
eth_provider: &eth::Provider,
requested_apis: &mut HashMap<PackageId, RequestedPackage>,
) -> LocalResponse {
*state = State::new(CONTRACT_ADDRESS.to_string()).unwrap();
// kill our old subscription and build a new one.
@ -546,7 +697,7 @@ pub fn rebuild_index(
subscribe_to_logs(&eth_provider, filter.clone());
for log in fetch_logs(&eth_provider, &filter) {
if let Err(e) = state.ingest_listings_contract_event(our, log) {
if let Err(e) = state.ingest_listings_contract_event(our, log, requested_apis) {
println!("error ingesting log: {e:?}");
};
}
@ -554,6 +705,43 @@ pub fn rebuild_index(
LocalResponse::RebuiltIndex
}
pub fn start_api_download(
our: &Address,
requested_apis: &mut HashMap<PackageId, RequestedPackage>,
package_id: PackageId,
download_from: &NodeId,
desired_version_hash: &str,
) -> DownloadResponse {
match Request::to((download_from.as_str(), our.process.clone()))
.inherit(true)
.body(
serde_json::to_vec(&RemoteRequest::DownloadApi {
package_id: package_id.clone(),
desired_version_hash: desired_version_hash.to_string(),
})
.unwrap(),
)
.send_and_await_response(5)
{
Ok(Ok(Message::Response { body, .. })) => match serde_json::from_slice::<Resp>(&body) {
Ok(Resp::RemoteResponse(RemoteResponse::DownloadApproved)) => {
requested_apis.insert(
package_id,
RequestedPackage {
from: download_from.to_string(),
mirror: false,
auto_update: false,
desired_version_hash: Some(desired_version_hash.to_string()),
},
);
DownloadResponse::Started
}
_ => DownloadResponse::Failure,
},
_ => DownloadResponse::Failure,
}
}
pub fn start_download(
our: &Address,
requested_packages: &mut HashMap<PackageId, RequestedPackage>,
@ -597,18 +785,83 @@ fn handle_receive_download(
our: &Address,
state: &mut State,
package_name: &str,
requested_apis: &mut HashMap<PackageId, RequestedPackage>,
requested_packages: &mut HashMap<PackageId, RequestedPackage>,
) -> anyhow::Result<()> {
// remove leading / and .zip from file name to get package ID
let package_name = package_name[1..].trim_end_matches(".zip");
let Ok(package_id) = package_name.parse::<PackageId>() else {
return Err(anyhow::anyhow!(
"bad package filename fron download: {package_name}"
));
let package_name_split = package_name.split('-').collect::<Vec<_>>();
let [package_name, api, version] = package_name_split.as_slice() else {
return Err(anyhow::anyhow!(
"bad package filename fron download: {package_name}"
));
};
if api != &"api" || version.chars().next() != Some('v') {
return Err(anyhow::anyhow!(
"bad package filename fron download: {package_name}"
));
}
let Ok(package_id) = package_name.parse::<PackageId>() else {
return Err(anyhow::anyhow!(
"bad package filename fron download: {package_name}"
));
};
return handle_receive_download_api(our, state, package_id, version, requested_apis);
};
handle_receive_download_package(our, state, &package_id, requested_packages)
}
fn handle_receive_download_api(
our: &Address,
state: &mut State,
package_id: PackageId,
version: &str,
requested_apis: &mut HashMap<PackageId, RequestedPackage>,
) -> anyhow::Result<()> {
println!("successfully received api {}", package_id.package());
// only save the package if we actually requested it
let Some(requested_package) = requested_apis.remove(&package_id) else {
return Err(anyhow::anyhow!("received unrequested api--rejecting!"));
};
let Some(blob) = get_blob() else {
return Err(anyhow::anyhow!("received download but found no blob"));
};
// check the version hash for this download against requested!!
// for now we can reject if it's not latest.
let download_hash = generate_version_hash(&blob.bytes);
let mut verified = false;
let Some(hash) = requested_package.desired_version_hash else {
return Err(anyhow::anyhow!("must have version hash to match against"));
};
if download_hash != hash {
if hash.is_empty() {
println!(
"\x1b[33mwarning: downloaded api has no version hashes--cannot verify code integrity, proceeding anyways\x1b[0m"
);
} else {
return Err(anyhow::anyhow!(
"downloaded api is not desired version--rejecting download! download hash: {download_hash}, desired hash: {hash}"
));
}
} else {
verified = true;
}
state.add_downloaded_api(&package_id, Some(blob.bytes))?;
Ok(())
}
fn handle_receive_download_package(
our: &Address,
state: &mut State,
package_id: &PackageId,
requested_packages: &mut HashMap<PackageId, RequestedPackage>,
) -> anyhow::Result<()> {
println!("successfully received {}", package_id);
// only save the package if we actually requested it
let Some(requested_package) = requested_packages.remove(&package_id) else {
let Some(requested_package) = requested_packages.remove(package_id) else {
return Err(anyhow::anyhow!("received unrequested package--rejecting!"));
};
let Some(blob) = get_blob() else {
@ -636,7 +889,7 @@ fn handle_receive_download(
}
None => {
// check against `metadata.properties.current_version`
let Some(package_listing) = state.get_listing(&package_id) else {
let Some(package_listing) = state.get_listing(package_id) else {
return Err(anyhow::anyhow!(
"downloaded package cannot be found in manager--rejecting download!"
));
@ -671,7 +924,7 @@ fn handle_receive_download(
}
}
let old_manifest_hash = match state.downloaded_packages.get(&package_id) {
let old_manifest_hash = match state.downloaded_packages.get(package_id) {
Some(package_state) => package_state
.manifest_hash
.clone()
@ -680,7 +933,7 @@ fn handle_receive_download(
};
state.add_downloaded_package(
&package_id,
package_id,
PackageState {
mirrored_from: Some(requested_package.from),
our_version: download_hash,
@ -695,7 +948,7 @@ fn handle_receive_download(
Some(blob.bytes),
)?;
let new_manifest_hash = match state.downloaded_packages.get(&package_id) {
let new_manifest_hash = match state.downloaded_packages.get(package_id) {
Some(package_state) => package_state
.manifest_hash
.clone()
@ -706,7 +959,7 @@ fn handle_receive_download(
// lastly, if auto_update is true, AND the caps_hash has NOT changed,
// trigger install!
if requested_package.auto_update && old_manifest_hash == new_manifest_hash {
handle_install(our, state, &package_id)?;
handle_install(our, state, package_id)?;
}
Ok(())
}
@ -734,11 +987,12 @@ fn handle_eth_sub_event(
our: &Address,
state: &mut State,
event: eth::SubscriptionResult,
requested_apis: &mut HashMap<PackageId, RequestedPackage>,
) -> anyhow::Result<()> {
let eth::SubscriptionResult::Log(log) = event else {
return Err(anyhow::anyhow!("got non-log event"));
};
state.ingest_listings_contract_event(our, *log)
state.ingest_listings_contract_event(our, *log, requested_apis)
}
fn fetch_package_manifest(package: &PackageId) -> anyhow::Result<Vec<kt::PackageManifestEntry>> {

View File

@ -1,10 +1,10 @@
use crate::LocalRequest;
use crate::{start_api_download, DownloadResponse, LocalRequest};
use alloy_sol_types::{sol, SolEvent};
use kinode_process_lib::eth::Log;
use kinode_process_lib::kernel_types as kt;
use kinode_process_lib::{println, *};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::collections::{HashMap, HashSet};
sol! {
event AppRegistered(
@ -105,6 +105,8 @@ pub struct State {
/// ingest apps on disk if we have to rebuild our state. this is also
/// updated every time we download, create, or uninstall a package.
pub downloaded_packages: HashMap<PackageId, PackageState>,
/// the APIs we have
pub downloaded_apis: HashSet<PackageId>,
}
impl State {
@ -118,6 +120,7 @@ impl State {
package_hashes: HashMap::new(),
listed_packages: HashMap::new(),
downloaded_packages: HashMap::new(),
downloaded_apis: HashSet::new(),
};
state.populate_packages_from_filesystem()?;
Ok(state)
@ -156,6 +159,47 @@ impl State {
self.downloaded_packages.get(package_id).cloned()
}
pub fn add_downloaded_api(
&mut self,
package_id: &PackageId,
package_bytes: Option<Vec<u8>>,
) -> anyhow::Result<()> {
if let Some(package_bytes) = package_bytes {
let drive_name = format!("/{package_id}/pkg");
let blob = LazyLoadBlob {
mime: Some("application/zip".to_string()),
bytes: package_bytes,
};
// create a new drive for this package in VFS
// this is possible because we have root access
Request::to(("our", "vfs", "distro", "sys"))
.body(serde_json::to_vec(&vfs::VfsRequest {
path: drive_name.clone(),
action: vfs::VfsAction::CreateDrive,
})?)
.send_and_await_response(5)??;
// convert the zip to a new package drive
let response = Request::to(("our", "vfs", "distro", "sys"))
.body(serde_json::to_vec(&vfs::VfsRequest {
path: drive_name.clone(),
action: vfs::VfsAction::AddZip,
})?)
.blob(blob.clone())
.send_and_await_response(5)??;
let vfs::VfsResponse::Ok = serde_json::from_slice::<vfs::VfsResponse>(response.body())?
else {
return Err(anyhow::anyhow!(
"cannot add NewPackage: do not have capability to access vfs"
));
};
}
self.downloaded_apis.insert(package_id.to_owned());
crate::set_state(&bincode::serialize(self)?);
Ok(())
}
pub fn add_downloaded_package(
&mut self,
package_id: &PackageId,
@ -313,7 +357,22 @@ impl State {
metadata: None,
},
None,
)?
)?;
let drive_path = format!("/{package_id}/pkg");
let result = Request::new()
.target(("our", "vfs", "distro", "sys"))
.body(
serde_json::to_vec(&vfs::VfsRequest {
path: format!("{}/api", drive_path),
action: vfs::VfsAction::Metadata,
})
.unwrap(),
)
.send_and_await_response(5);
if let Ok(Ok(_)) = result {
self.downloaded_apis.insert(package_id.to_owned());
};
}
}
Ok(())
@ -368,6 +427,7 @@ impl State {
&mut self,
our: &Address,
log: Log,
requested_apis: &mut HashMap<PackageId, RequestedPackage>,
) -> anyhow::Result<()> {
let block_number: u64 = log
.block_number
@ -416,21 +476,34 @@ impl State {
let listing = match self.get_listing_with_hash_mut(&package_hash) {
Some(current_listing) => {
current_listing.name = package_name;
current_listing.publisher = publisher_name;
current_listing.name = package_name.clone();
current_listing.publisher = publisher_name.clone();
current_listing.metadata_hash = metadata_hash;
current_listing.metadata = metadata;
current_listing.clone()
}
None => PackageListing {
owner: "".to_string(),
name: package_name,
publisher: publisher_name,
name: package_name.clone(),
publisher: publisher_name.clone(),
metadata_hash,
metadata,
},
};
self.insert_listing(package_hash, listing);
self.insert_listing(package_hash.clone(), listing);
let api_hash = ""; // TODO
let api_download_request_result = start_api_download(
our,
requested_apis,
PackageId::new(&package_name, &publisher_name),
&publisher_name,
api_hash,
);
match api_download_request_result {
DownloadResponse::Failure => println!("failed to get API for {package_name}"),
_ => {}
}
}
AppMetadataUpdated::SIGNATURE_HASH => {
let package_hash = log.topics()[1].to_string();

View File

@ -6,7 +6,7 @@ mod api;
use api::*;
wit_bindgen::generate!({
path: "wit",
path: "target/wit",
world: "process",
});

View File

@ -6,7 +6,7 @@ mod ft_worker_lib;
use ft_worker_lib::*;
wit_bindgen::generate!({
path: "wit",
path: "target/wit",
world: "process",
});

View File

@ -6,7 +6,7 @@ mod api;
use api::*;
wit_bindgen::generate!({
path: "wit",
path: "target/wit",
world: "process",
});

View File

@ -9,8 +9,9 @@
"mirrors": [],
"code_hashes": {
"0.3.1": ""
}
},
"dependencies": []
},
"external_url": "https://kinode.org",
"animation_url": ""
}
}

View File

@ -6,7 +6,7 @@ mod api;
use api::*;
wit_bindgen::generate!({
path: "wit",
path: "target/wit",
world: "process",
});

View File

@ -0,0 +1,32 @@
interface chess {
/// Our "chess protocol" request/response format. We'll always serialize these
/// to a byte vector and send them over IPC.
variant request {
new-game(new-game-request),
move(move-request),
resign(string),
}
variant response {
new-game-accepted,
new-game-rejected,
move-accepted,
move-rejected,
}
record new-game-request {
white: string,
black: string,
}
record move-request {
game-id: string,
move-str: string,
}
}
world chess-sys-v0 {
import chess;
include process;
}

View File

@ -8,28 +8,12 @@ use serde::{Deserialize, Serialize};
use std::collections::{HashMap, HashSet};
extern crate base64;
use crate::kinode::process::chess::{
MoveRequest, NewGameRequest, Request as ChessRequest, Response as ChessResponse,
};
const ICON: &str = include_str!("icon");
//
// Our "chess protocol" request/response format. We'll always serialize these
// to a byte vector and send them over IPC.
//
#[derive(Debug, Serialize, Deserialize)]
enum ChessRequest {
NewGame { white: String, black: String },
Move { game_id: String, move_str: String },
Resign(String),
}
#[derive(Debug, Eq, PartialEq, Serialize, Deserialize)]
enum ChessResponse {
NewGameAccepted,
NewGameRejected,
MoveAccepted,
MoveRejected,
}
//
// Our serializable state format.
//
@ -98,8 +82,10 @@ fn send_ws_update(our: &Address, game: &Game, open_channels: &HashSet<u32>) -> a
// Boilerplate: generate the wasm bindings for a process
wit_bindgen::generate!({
path: "wit",
world: "process",
path: "target/wit",
world: "chess-sys-v0",
generate_unused_types: true,
additional_derives: [PartialEq, serde::Deserialize, serde::Serialize],
});
// After generating bindings, use this macro to define the Component struct
// and its init() function, which the kernel will look for on startup.
@ -250,7 +236,7 @@ fn handle_chess_request(
let game_id = source_node;
match action {
ChessRequest::NewGame { white, black } => {
ChessRequest::NewGame(NewGameRequest { white, black }) => {
// Make a new game with source.node
// This will replace any existing game with source.node!
if state.games.contains_key(game_id) {
@ -277,7 +263,7 @@ fn handle_chess_request(
.body(serde_json::to_vec(&ChessResponse::NewGameAccepted)?)
.send()
}
ChessRequest::Move { ref move_str, .. } => {
ChessRequest::Move(MoveRequest { ref move_str, .. }) => {
// Get the associated game, and respond with an error if
// we don't have it in our state.
let Some(game) = state.games.get_mut(game_id) else {
@ -330,7 +316,7 @@ fn handle_local_request(
action: &ChessRequest,
) -> anyhow::Result<()> {
match action {
ChessRequest::NewGame { white, black } => {
ChessRequest::NewGame(NewGameRequest { white, black }) => {
// Create a new game. We'll enforce that one of the two players is us.
if white != &our.node && black != &our.node {
return Err(anyhow::anyhow!("cannot start a game without us!"));
@ -371,7 +357,7 @@ fn handle_local_request(
save_chess_state(&state);
Ok(())
}
ChessRequest::Move { game_id, move_str } => {
ChessRequest::Move(MoveRequest { game_id, move_str }) => {
// Make a move. We'll enforce that it's our turn. The game_id is the
// person we're playing with.
let Some(game) = state.games.get_mut(game_id) else {
@ -489,10 +475,12 @@ fn handle_http_request(
// send the other player a new game request
let Ok(msg) = Request::new()
.target((game_id, our.process.clone()))
.body(serde_json::to_vec(&ChessRequest::NewGame {
white: player_white.clone(),
black: player_black.clone(),
})?)
.body(serde_json::to_vec(&ChessRequest::NewGame(
NewGameRequest {
white: player_white.clone(),
black: player_black.clone(),
},
))?)
.send_and_await_response(5)?
else {
return Err(anyhow::anyhow!(
@ -588,10 +576,10 @@ fn handle_http_request(
// if so, update the records
let Ok(msg) = Request::new()
.target((game_id, our.process.clone()))
.body(serde_json::to_vec(&ChessRequest::Move {
.body(serde_json::to_vec(&ChessRequest::Move(MoveRequest {
game_id: game_id.to_string(),
move_str: move_str.to_string(),
})?)
}))?)
.send_and_await_response(5)?
else {
return Err(anyhow::anyhow!(

View File

@ -9,8 +9,9 @@
"mirrors": [],
"code_hashes": {
"0.2.1": ""
}
},
"dependencies": []
},
"external_url": "https://kinode.org",
"animation_url": ""
}
}

View File

@ -0,0 +1,23 @@
interface homepage {
/// The request format to add or remove an app from the homepage. You must have messaging
/// access to `homepage:homepage:sys` in order to perform this. Serialize using serde_json.
variant request {
/// the package and process name will come from request source.
/// the path will automatically have the process_id prepended.
/// the icon is a base64 encoded image.
add(add-request),
remove,
}
record add-request {
label: string,
icon: option<string>,
path: option<string>,
widget: option<string>,
}
}
world homepage-sys-v0 {
import homepage;
include process;
}
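
As a usage sketch (not part of this diff), a process that imports this interface through its own world could register itself on the homepage roughly as follows. The generated module path mirrors the one homepage itself uses below; messaging capability to homepage:homepage:sys is assumed.

use crate::kinode::process::homepage::{AddRequest, Request as HomepageRequest};
use kinode_process_lib::Request;

// Sketch: add an entry to the homepage. The package/process name is taken
// from the request source; homepage prepends our process_id to `path`.
fn add_to_homepage() -> anyhow::Result<()> {
    Request::to(("our", "homepage", "homepage", "sys"))
        .body(serde_json::to_vec(&HomepageRequest::Add(AddRequest {
            label: "My App".to_string(),
            icon: None, // optionally a base64-encoded image
            path: Some("/".to_string()),
            widget: None,
        }))?)
        .send()?;
    Ok(())
}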

View File

@ -10,21 +10,7 @@ use kinode_process_lib::{
use serde::{Deserialize, Serialize};
use std::collections::{BTreeMap, HashMap};
/// The request format to add or remove an app from the homepage. You must have messaging
/// access to `homepage:homepage:sys` in order to perform this. Serialize using serde_json.
#[derive(Serialize, Deserialize)]
enum HomepageRequest {
/// the package and process name will come from request source.
/// the path will automatically have the process_id prepended.
/// the icon is a base64 encoded image.
Add {
label: String,
icon: Option<String>,
path: Option<String>,
widget: Option<String>,
},
Remove,
}
use crate::kinode::process::homepage::{AddRequest, Request as HomepageRequest};
#[derive(Serialize, Deserialize)]
struct HomepageApp {
@ -36,8 +22,10 @@ struct HomepageApp {
}
wit_bindgen::generate!({
path: "wit",
world: "process",
path: "target/wit",
world: "homepage-sys-v0",
generate_unused_types: true,
additional_derives: [serde::Deserialize, serde::Serialize],
});
call_init!(init);
@ -93,12 +81,12 @@ fn init(our: Address) {
// they must have messaging access to us in order to perform this.
if let Ok(request) = serde_json::from_slice::<HomepageRequest>(message.body()) {
match request {
HomepageRequest::Add {
HomepageRequest::Add(AddRequest {
label,
icon,
path,
widget,
} => {
}) => {
app_data.insert(
message.source().process.to_string(),
HomepageApp {

View File

@ -9,8 +9,9 @@
"mirrors": [],
"code_hashes": {
"0.1.1": ""
}
},
"dependencies": []
},
"external_url": "https://kinode.org",
"animation_url": ""
}
}

File diff suppressed because it is too large.

View File

@ -9,8 +9,9 @@
"mirrors": [],
"code_hashes": {
"0.1.0": ""
}
},
"dependencies": []
},
"external_url": "https://kinode.org",
"animation_url": ""
}
}

View File

@ -2,7 +2,7 @@ use kinode_process_lib::{call_init, http, timer, Address, Request};
use serde::{Deserialize, Serialize};
wit_bindgen::generate!({
path: "wit",
path: "target/wit",
world: "process",
});

View File

@ -0,0 +1,40 @@
interface kns-indexer {
/// IndexerRequests are used to query discrete information from the indexer
/// for example, if you want to know the human readable name for a namehash,
/// you would send a NamehashToName request.
/// If you want to know the most recent on-chain routing information for a
/// human readable name, you would send a NodeInfo request.
/// The block parameter specifies the recency of the data: the indexer will
/// not respond until it has processed events up to the specified block.
variant indexer-requests {
/// return the human readable name for a namehash
/// returns an Option<String>
namehash-to-name(namehash-to-name-request),
/// return the most recent on-chain routing information for a node name.
/// returns an Option<KnsUpdate>
/// set block to 0 if you just want to get the current state of the indexer
node-info(node-info-request),
/// return the entire state of the indexer at the given block
/// set block to 0 if you just want to get the current state of the indexer
get-state(get-state-request),
}
record namehash-to-name-request {
hash: string,
block: u64,
}
record node-info-request {
name: string,
block: u64,
}
record get-state-request {
block: u64,
}
}
world kns-indexer-sys-v0 {
import kns-indexer;
include process;
}
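
A usage sketch (not from this PR): with these bindings imported into a process's world, querying the indexer for current routing info might look like this. The indexer's process id kns_indexer:kns_indexer:sys is an assumption here.

use crate::kinode::process::kns_indexer::{IndexerRequests, NodeInfoRequest};
use kinode_process_lib::Request;

// Sketch: ask the indexer for routing info on a node name.
// block: 0 means "answer from whatever state you have indexed so far".
fn node_info_bytes(name: &str) -> anyhow::Result<Vec<u8>> {
    let response = Request::to(("our", "kns_indexer", "kns_indexer", "sys")) // assumed process id
        .body(serde_json::to_vec(&IndexerRequests::NodeInfo(NodeInfoRequest {
            name: name.to_string(),
            block: 0,
        }))?)
        .send_and_await_response(5)??;
    // the response body is a serde_json-encoded Option<KnsUpdate>
    Ok(response.body().to_vec())
}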

View File

@ -1,7 +1,7 @@
use kinode_process_lib::{await_next_message_body, call_init, eth, println, Address};
wit_bindgen::generate!({
path: "wit",
path: "target/wit",
world: "process",
});

View File

@ -8,9 +8,15 @@ use std::collections::{
BTreeMap,
};
use crate::kinode::process::kns_indexer::{
GetStateRequest, IndexerRequests, NamehashToNameRequest, NodeInfoRequest,
};
wit_bindgen::generate!({
path: "wit",
world: "process",
path: "target/wit",
world: "kns-indexer-sys-v0",
generate_unused_types: true,
additional_derives: [serde::Deserialize, serde::Serialize],
});
#[cfg(not(feature = "simulation-mode"))]
@ -42,27 +48,6 @@ struct State {
block: u64,
}
/// IndexerRequests are used to query discrete information from the indexer
/// for example, if you want to know the human readable name for a namehash,
/// you would send a NamehashToName request.
/// If you want to know the most recent on-chain routing information for a
/// human readable name, you would send a NodeInfo request.
/// The block parameter specifies the recency of the data: the indexer will
/// not respond until it has processed events up to the specified block.
#[derive(Debug, Serialize, Deserialize)]
pub enum IndexerRequests {
/// return the human readable name for a namehash
/// returns an Option<String>
NamehashToName { hash: String, block: u64 },
/// return the most recent on-chain routing information for a node name.
/// returns an Option<KnsUpdate>
/// set block to 0 if you just want to get the current state of the indexer
NodeInfo { name: String, block: u64 },
/// return the entire state of the indexer at the given block
/// set block to 0 if you just want to get the current state of the indexer
GetState { block: u64 },
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum NetAction {
KnsUpdate(KnsUpdate),
@ -262,7 +247,7 @@ fn main(our: Address, mut state: State) -> anyhow::Result<()> {
};
match request {
IndexerRequests::NamehashToName { ref hash, block } => {
IndexerRequests::NamehashToName(NamehashToNameRequest { ref hash, block }) => {
if block <= state.block {
Response::new()
.body(serde_json::to_vec(&state.names.get(hash))?)
@ -274,7 +259,7 @@ fn main(our: Address, mut state: State) -> anyhow::Result<()> {
.push(request);
}
}
IndexerRequests::NodeInfo { ref name, block } => {
IndexerRequests::NodeInfo(NodeInfoRequest { ref name, block }) => {
if block <= state.block {
Response::new()
.body(serde_json::to_vec(&state.nodes.get(name))?)
@ -286,7 +271,7 @@ fn main(our: Address, mut state: State) -> anyhow::Result<()> {
.push(request);
}
}
IndexerRequests::GetState { block } => {
IndexerRequests::GetState(GetStateRequest { block }) => {
if block <= state.block {
Response::new().body(serde_json::to_vec(&state)?).send()?;
} else {
@ -337,19 +322,19 @@ fn handle_eth_message(
if *block <= state.block {
for request in requests.iter() {
match request {
IndexerRequests::NamehashToName { hash, .. } => {
IndexerRequests::NamehashToName(NamehashToNameRequest { hash, .. }) => {
Response::new()
.body(serde_json::to_vec(&state.names.get(hash))?)
.send()
.unwrap();
}
IndexerRequests::NodeInfo { name, .. } => {
IndexerRequests::NodeInfo(NodeInfoRequest { name, .. }) => {
Response::new()
.body(serde_json::to_vec(&state.nodes.get(name))?)
.send()
.unwrap();
}
IndexerRequests::GetState { .. } => {
IndexerRequests::GetState(GetStateRequest { .. }) => {
Response::new()
.body(serde_json::to_vec(&state)?)
.send()

View File

@ -9,8 +9,9 @@
"mirrors": [],
"code_hashes": {
"0.2.1": ""
}
},
"dependencies": []
},
"external_url": "https://kinode.org",
"animation_url": ""
}
}

View File

@ -3,7 +3,7 @@ use serde::{Deserialize, Serialize};
use std::collections::HashMap;
wit_bindgen::generate!({
path: "wit",
path: "target/wit",
world: "process",
});

View File

@ -9,8 +9,9 @@
"mirrors": [],
"code_hashes": {
"0.1.0": ""
}
},
"dependencies": []
},
"external_url": "https://kinode.org",
"animation_url": ""
}
}

View File

@ -133,7 +133,7 @@ impl SettingsState {
}
wit_bindgen::generate!({
path: "wit",
path: "target/wit",
world: "process",
});

View File

@ -4,7 +4,7 @@ use kinode_process_lib::{
use serde::{Deserialize, Serialize};
wit_bindgen::generate!({
path: "wit",
path: "target/wit",
world: "process",
});

View File

@ -3,7 +3,7 @@ use kinode_process_lib::{
};
wit_bindgen::generate!({
path: "wit",
path: "target/wit",
world: "process",
});

View File

@ -1,7 +1,7 @@
use kinode_process_lib::{await_next_message_body, call_init, println, Address};
wit_bindgen::generate!({
path: "wit",
path: "target/wit",
world: "process",
});

View File

@ -3,7 +3,7 @@ use kinode_process_lib::{
};
wit_bindgen::generate!({
path: "wit",
path: "target/wit",
world: "process",
});

View File

@ -5,7 +5,7 @@ use kinode_process_lib::{
use regex::Regex;
wit_bindgen::generate!({
path: "wit",
path: "target/wit",
world: "process",
});

View File

@ -9,8 +9,9 @@
"mirrors": [],
"code_hashes": {
"0.1.1": ""
}
},
"dependencies": []
},
"external_url": "https://kinode.org",
"animation_url": ""
}
}

View File

@ -3,7 +3,7 @@ use kinode_process_lib::{
};
wit_bindgen::generate!({
path: "wit",
path: "target/wit",
world: "process",
});

View File

@ -1,7 +1,7 @@
use kinode_process_lib::{call_init, net, println, Address, Message, Request};
wit_bindgen::generate!({
path: "wit",
path: "target/wit",
world: "process",
});

View File

@ -3,7 +3,7 @@ use kinode_process_lib::{
};
wit_bindgen::generate!({
path: "wit",
path: "target/wit",
world: "process",
});

View File

@ -2,7 +2,7 @@ use kinode_process_lib::{call_init, net, println, Address, Message, NodeId, Requ
use serde::{Deserialize, Serialize};
wit_bindgen::generate!({
path: "wit",
path: "target/wit",
world: "process",
});

View File

@ -9,7 +9,7 @@ use serde::{Deserialize, Serialize};
use std::collections::{HashMap, HashSet};
wit_bindgen::generate!({
path: "wit",
path: "target/wit",
world: "process",
});

View File

@ -4,7 +4,7 @@ use kinode_process_lib::{
};
wit_bindgen::generate!({
path: "wit",
path: "target/wit",
world: "process",
});

View File

@ -9,8 +9,9 @@
"mirrors": [],
"code_hashes": {
"0.1.1": ""
}
},
"dependencies": []
},
"external_url": "https://kinode.org",
"animation_url": ""
}
}

View File

@ -10,7 +10,7 @@ mod tester_types;
use tester_types as tt;
wit_bindgen::generate!({
path: "wit",
path: "target/wit",
world: "process",
});

View File

@ -10,7 +10,7 @@ mod tester_types;
use tester_types as tt;
wit_bindgen::generate!({
path: "wit",
path: "target/wit",
world: "process",
});

View File

@ -297,7 +297,7 @@ async fn handle_request(
/// function run only upon fresh boot.
///
/// for each folder in /modules, looks for a package.zip file, extracts the contents,
/// for each included package.zip file, extracts the contents,
/// sends the contents to VFS, and reads the manifest.json.
///
/// the manifest.json contains instructions for which processes to boot and what

View File

@ -1072,6 +1072,7 @@ pub struct Erc721Metadata {
/// - `license`: An optional field containing the license of the package.
/// - `screenshots`: An optional field containing a list of URLs to screenshots of the package.
/// - `wit_version`: An optional field containing the version of the WIT standard that the package adheres to.
/// - `dependencies`: An optional field containing a list of `PackageId`s: API dependencies
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Erc721Properties {
pub package_name: String,
@ -1082,6 +1083,7 @@ pub struct Erc721Properties {
pub license: Option<String>,
pub screenshots: Option<Vec<String>>,
pub wit_version: Option<(u32, u32, u32)>,
pub dependencies: Option<Vec<String>>,
}
/// the type that gets deserialized from each entry in the array in `manifest.json`
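
For reference, the new dependencies field on Erc721Properties corresponds to the "dependencies" arrays introduced in the metadata.json files earlier in this diff. A hypothetical populated example, sketched as a serde_json value: the field set is partial and the "package:publisher" string format for entries is an assumption.

use serde_json::{json, Value};

// Illustrative only: a partial metadata.json "properties" object that
// declares one API dependency for the package.
fn example_properties() -> Value {
    json!({
        "package_name": "my_app",
        "current_version": "0.1.0",
        "mirrors": [],
        "code_hashes": { "0.1.0": "" },
        "dependencies": ["chess:sys"] // assumed "package:publisher" format
    })
}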