Merge pull request #106 from uqbar-dao/bp/vfs-meta

vfs: add metadata to ReadDir
This commit is contained in:
dr-frmr 2023-12-20 17:54:42 -05:00 committed by GitHub
commit 415832effd
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 121 additions and 7 deletions

View File

@ -410,8 +410,16 @@ async fn check_caps(
)
.await?;
let db_path = format!("{}{}", kv_path, request.db.to_string());
if open_kvs.contains_key(&(request.package_id.clone(), request.db.clone())) {
return Err(KvError::DbAlreadyExists);
}
let db_path = format!(
"{}/{}/{}",
kv_path,
request.package_id.to_string(),
request.db.to_string()
);
fs::create_dir_all(&db_path).await?;
let db = OptimisticTransactionDB::open_default(&db_path)?;

View File

@ -2,13 +2,33 @@ use anyhow::Result;
use dashmap::DashMap;
use rusqlite::types::{FromSql, FromSqlError, ToSql, ValueRef};
use rusqlite::Connection;
use std::collections::{HashMap, VecDeque};
use std::collections::{HashMap, HashSet, VecDeque};
use std::sync::Arc;
use tokio::fs;
use tokio::sync::Mutex;
use crate::types::*;
lazy_static::lazy_static! {
    /// Leading SQL keywords that mark a statement as a read-only query;
    /// compared against the upper-cased first word of an incoming query.
    static ref READ_KEYWORDS: HashSet<String> = [
        "ANALYZE", "ATTACH", "BEGIN", "EXPLAIN", "PRAGMA", "SELECT", "VALUES", "WITH",
    ]
    .iter()
    .map(|kw| kw.to_string())
    .collect();

    /// Leading SQL keywords that mark a statement as a mutation;
    /// compared against the upper-cased first word of an incoming statement.
    static ref WRITE_KEYWORDS: HashSet<String> = [
        "ALTER", "ANALYZE", "COMMIT", "CREATE", "DELETE", "DETACH", "DROP", "END",
        "INSERT", "REINDEX", "RELEASE", "RENAME", "REPLACE", "ROLLBACK", "SAVEPOINT",
        "UPDATE", "VACUUM",
    ]
    .iter()
    .map(|kw| kw.to_string())
    .collect();
}
pub async fn sqlite(
our_node: String,
send_to_loop: MessageSender,
@ -57,6 +77,7 @@ pub async fn sqlite(
let send_to_terminal = send_to_terminal.clone();
let send_to_loop = send_to_loop.clone();
let open_dbs = open_dbs.clone();
let txs = txs.clone();
let sqlite_path = sqlite_path.clone();
@ -149,6 +170,14 @@ async fn handle_request(
}
};
let db = db.lock().await;
let first_word = query
.split_whitespace()
.next()
.map(|word| word.to_uppercase())
.unwrap_or("".to_string());
if !READ_KEYWORDS.contains(&first_word) {
return Err(SqliteError::NotAReadKeyword.into());
}
let parameters = get_json_params(payload)?;
@ -196,6 +225,16 @@ async fn handle_request(
};
let db = db.lock().await;
let first_word = statement
.split_whitespace()
.next()
.map(|word| word.to_uppercase())
.unwrap_or("".to_string());
if !WRITE_KEYWORDS.contains(&first_word) {
return Err(SqliteError::NotAWriteKeyword.into());
}
let parameters = get_json_params(payload)?;
match tx_id {
@ -385,12 +424,22 @@ async fn check_caps(
)
.await?;
let db_path = format!("{}{}", sqlite_path, request.db.to_string());
if open_dbs.contains_key(&(request.package_id.clone(), request.db.clone())) {
return Err(SqliteError::DbAlreadyExists);
}
let db_path = format!(
"{}/{}/{}",
sqlite_path,
request.package_id.to_string(),
request.db.to_string()
);
fs::create_dir_all(&db_path).await?;
let db = Connection::open(&db_path)?;
db.execute("PRAGMA journal_mode=WAL;", [])?;
let db_file_path = format!("{}/{}.db", db_path, request.db.to_string());
let db = Connection::open(&db_file_path)?;
let _ = db.execute("PRAGMA journal_mode=WAL", []);
open_dbs.insert(
(request.package_id.clone(), request.db.clone()),

View File

@ -986,6 +986,7 @@ pub enum VfsAction {
RemoveDir,
RemoveDirAll,
Rename(String),
Metadata,
AddZip,
Len,
SetLen(u64),
@ -999,13 +1000,34 @@ pub enum SeekFrom {
Current(i64),
}
#[derive(Debug, Serialize, Deserialize)]
/// Coarse classification of a filesystem entry, derived from
/// `std::fs::Metadata` (see `get_file_type`).
pub enum FileType {
    File,
    Directory,
    Symlink,
    /// Anything that is not a regular file, directory, or symlink
    /// (e.g. device nodes, sockets — platform-dependent).
    Other,
}
#[derive(Debug, Serialize, Deserialize)]
/// Metadata returned by `VfsAction::Metadata`: the entry's type and its
/// length in bytes as reported by the underlying filesystem.
pub struct FileMetadata {
    pub file_type: FileType,
    // Byte length from `Metadata::len()`.
    pub len: u64,
}
#[derive(Debug, Serialize, Deserialize)]
/// One entry of a `VfsAction::ReadDir` listing: the entry's path
/// (relative to the vfs root where produced) and its file type.
pub struct DirEntry {
    pub path: String,
    pub file_type: FileType,
}
#[derive(Debug, Serialize, Deserialize)]
/// Responses emitted by the vfs runtime module for `VfsAction` requests.
pub enum VfsResponse {
    Ok,
    Err(VfsError),
    Read,
    // NOTE(review): the two `ReadDir` lines below appear to be diff residue
    // (old `Vec<String>` variant followed by its `Vec<DirEntry>` replacement);
    // only the `Vec<DirEntry>` form can be the live variant — confirm against
    // the merged file.
    ReadDir(Vec<String>),
    ReadDir(Vec<DirEntry>),
    ReadToString(String),
    Metadata(FileMetadata),
    Len(u64),
    /// 32-byte content hash of a file.
    Hash([u8; 32]),
}

View File

@ -271,7 +271,13 @@ async fn handle_request(
let entry_path = entry.path();
let relative_path = entry_path.strip_prefix(&vfs_path).unwrap_or(&entry_path);
entries.push(relative_path.display().to_string());
let metadata = entry.metadata().await?;
let file_type = get_file_type(&metadata);
let dir_entry = DirEntry {
path: relative_path.display().to_string(),
file_type,
};
entries.push(dir_entry);
}
(
serde_json::to_vec(&VfsResponse::ReadDir(entries)).unwrap(),
@ -327,6 +333,22 @@ async fn handle_request(
fs::rename(path, new_path).await?;
(serde_json::to_vec(&VfsResponse::Ok).unwrap(), None)
}
VfsAction::Metadata => {
let file = open_file(open_files.clone(), path, false).await?;
let file = file.lock().await;
let metadata = file.metadata().await?;
let file_type = get_file_type(&metadata);
let meta = FileMetadata {
len: metadata.len(),
file_type,
};
(
serde_json::to_vec(&VfsResponse::Metadata(meta)).unwrap(),
None,
)
}
VfsAction::Len => {
let file = open_file(open_files.clone(), path, false).await?;
let file = file.lock().await;
@ -590,6 +612,7 @@ async fn check_caps(
| VfsAction::ReadToString
| VfsAction::Seek(_)
| VfsAction::Hash
| VfsAction::Metadata
| VfsAction::Len => {
if src_package_id == package_id {
return Ok(());
@ -692,6 +715,18 @@ async fn add_capability(
Ok(())
}
/// Map a `std::fs::Metadata` to the vfs `FileType` classification.
///
/// The std predicates (file / directory / symlink) are mutually exclusive,
/// so the check order does not affect the result; anything matching none of
/// them is reported as `FileType::Other`.
fn get_file_type(metadata: &std::fs::Metadata) -> FileType {
    let kind = metadata.file_type();
    if kind.is_symlink() {
        FileType::Symlink
    } else if kind.is_dir() {
        FileType::Directory
    } else if kind.is_file() {
        FileType::File
    } else {
        FileType::Other
    }
}
fn make_error_message(
our_node: String,
id: u64,