refactor: rename the domain term prefetcher to fetcher

sxyazi 2024-05-28 00:28:17 +08:00
parent b81b707a3e
commit 46cd42f923
15 changed files with 50 additions and 50 deletions

View File

@ -12,7 +12,7 @@ Yazi (means "duck") is a terminal file manager written in Rust, based on non-blocking async I/O.
- 💪 **Powerful Async Task Scheduling and Management**: Provides real-time progress updates, task cancellation, and internal task priority assignment.
- 🖼️ **Built-in Support for Multiple Image Protocols**: Also integrated with Überzug++, covering almost all terminals.
- 🌟 **Built-in Code Highlighting and Image Decoding**: Combined with the pre-loading mechanism, greatly accelerates image and normal file loading.
- 🔌 **Concurrent Plugin System**: UI plugins (rewriting most of the UI), functional plugins, custom previewer/preloader/prefetcher; Just some pieces of Lua.
- 🔌 **Concurrent Plugin System**: UI plugins (rewriting most of the UI), functional plugins, custom previewer/preloader/fetcher; Just some pieces of Lua.
- 📡 **Data Distribution Service**: Built on a client-server architecture (no additional server process required), integrated with a Lua-based publish-subscribe model, achieving cross-instance communication and state persistence.
- 📦 **Package Manager**: Install plugins and themes with one command, keeping them always up to date, or pin them to a specific version.
- 🧰 Integration with fd, rg, fzf, zoxide

View File

@ -80,7 +80,7 @@ suppress_preload = false
[plugin]
prefetchers = [
fetchers = [
# Mimetype
{ name = "*", cond = "!mime", run = "mime", prio = "high" },
]

View File

@ -4,7 +4,7 @@ use yazi_shared::{event::Cmd, Condition};
use crate::{Pattern, Priority};
#[derive(Debug, Deserialize)]
pub struct Prefetcher {
pub struct Fetcher {
#[serde(skip)]
pub id: u8,
pub cond: Option<Condition>,
@ -16,14 +16,14 @@ pub struct Prefetcher {
}
#[derive(Debug, Clone)]
pub struct PrefetcherProps {
pub struct FetcherProps {
pub id: u8,
pub name: String,
pub prio: Priority,
}
impl From<&Prefetcher> for PrefetcherProps {
fn from(prefetcher: &Prefetcher) -> Self {
Self { id: prefetcher.id, name: prefetcher.run.name.to_owned(), prio: prefetcher.prio }
impl From<&Fetcher> for FetcherProps {
fn from(fetcher: &Fetcher) -> Self {
Self { id: fetcher.id, name: fetcher.run.name.to_owned(), prio: fetcher.prio }
}
}

View File

@ -1,10 +1,10 @@
mod fetcher;
mod plugin;
mod prefetcher;
mod preloader;
mod previewer;
pub use fetcher::*;
pub use plugin::*;
pub use prefetcher::*;
pub use preloader::*;
pub use previewer::*;

View File

@ -3,14 +3,14 @@ use std::path::Path;
use serde::Deserialize;
use yazi_shared::MIME_DIR;
use super::{Prefetcher, Preloader, Previewer};
use super::{Fetcher, Preloader, Previewer};
use crate::{plugin::MAX_PREWORKERS, Preset, MERGED_YAZI};
#[derive(Deserialize)]
pub struct Plugin {
pub prefetchers: Vec<Prefetcher>,
pub preloaders: Vec<Preloader>,
pub previewers: Vec<Previewer>,
pub fetchers: Vec<Fetcher>,
pub preloaders: Vec<Preloader>,
pub previewers: Vec<Previewer>,
}
impl Default for Plugin {
@ -22,11 +22,11 @@ impl Default for Plugin {
#[derive(Deserialize)]
struct Shadow {
prefetchers: Vec<Prefetcher>,
fetchers: Vec<Fetcher>,
#[serde(default)]
prepend_prefetchers: Vec<Prefetcher>,
prepend_fetchers: Vec<Fetcher>,
#[serde(default)]
append_prefetchers: Vec<Prefetcher>,
append_fetchers: Vec<Fetcher>,
preloaders: Vec<Preloader>,
#[serde(default)]
@ -49,39 +49,39 @@ impl Default for Plugin {
shadow.previewers.retain(|r| !r.any_dir());
}
Preset::mix(&mut shadow.prefetchers, shadow.prepend_prefetchers, shadow.append_prefetchers);
Preset::mix(&mut shadow.fetchers, shadow.prepend_fetchers, shadow.append_fetchers);
Preset::mix(&mut shadow.preloaders, shadow.prepend_preloaders, shadow.append_preloaders);
Preset::mix(&mut shadow.previewers, shadow.prepend_previewers, shadow.append_previewers);
if shadow.prefetchers.len() + shadow.preloaders.len() > MAX_PREWORKERS as usize {
panic!("Prefetchers and preloaders exceed the limit of {MAX_PREWORKERS}");
if shadow.fetchers.len() + shadow.preloaders.len() > MAX_PREWORKERS as usize {
panic!("Fetchers and preloaders exceed the limit of {MAX_PREWORKERS}");
}
for (i, p) in shadow.prefetchers.iter_mut().enumerate() {
for (i, p) in shadow.fetchers.iter_mut().enumerate() {
p.id = i as u8;
}
for (i, p) in shadow.preloaders.iter_mut().enumerate() {
p.id = shadow.prefetchers.len() as u8 + i as u8;
p.id = shadow.fetchers.len() as u8 + i as u8;
}
Self {
prefetchers: shadow.prefetchers,
preloaders: shadow.preloaders,
previewers: shadow.previewers,
fetchers: shadow.fetchers,
preloaders: shadow.preloaders,
previewers: shadow.previewers,
}
}
}
impl Plugin {
pub fn prefetchers(
pub fn fetchers(
&self,
path: &Path,
mime: Option<&str>,
f: impl Fn(&str) -> bool + Copy,
) -> Vec<&Prefetcher> {
) -> Vec<&Fetcher> {
let is_dir = mime == Some(MIME_DIR);
self
.prefetchers
.fetchers
.iter()
.filter(|&p| {
p.cond.as_ref().and_then(|c| c.eval(f)) != Some(false)
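
The filter above runs a fetcher unless its condition explicitly evaluates to false, so the default cond = "!mime" entry only fires while a file's mimetype is still unknown. A minimal sketch of that behaviour, assuming a simplified string condition instead of the real yazi_shared::Condition type:

fn should_run(cond: Option<&str>, has_factor: impl Fn(&str) -> bool) -> bool {
    match cond {
        // No condition configured: the fetcher is always eligible.
        None => true,
        // "!mime": eligible only while the mimetype factor is still unset.
        Some("!mime") => !has_factor("mime"),
        // "mime": eligible once the mimetype is already known.
        Some("mime") => has_factor("mime"),
        // Other factor expressions are out of scope for this sketch.
        _ => true,
    }
}

fn main() {
    assert!(should_run(Some("!mime"), |f| f != "mime")); // mimetype unknown: run
    assert!(!should_run(Some("!mime"), |_| true));       // mimetype known: skip
}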

View File

@ -63,8 +63,8 @@ impl Manager {
}
done.extend(files.iter().map(|f| (f.url(), String::new())));
if let Err(e) = isolate::prefetch("mime", files).await {
error!("prefetch `mime` failed in opening: {e}");
if let Err(e) = isolate::fetch("mime", files).await {
error!("fetch `mime` failed in opening: {e}");
}
ManagerProxy::open_do(OpenDoOpt { hovered, targets: done, interactive: opt.interactive });

View File

@ -32,7 +32,7 @@ impl Manager {
}
let targets = self.current().paginate(opt.page.unwrap_or(self.current().page));
tasks.prefetch_paged(targets, &self.mimetype);
tasks.fetch_paged(targets, &self.mimetype);
tasks.preload_paged(targets, &self.mimetype);
}
}

View File

@ -117,8 +117,8 @@ impl Watcher {
if reload.is_empty() {
continue;
}
if let Err(e) = isolate::prefetch("mime", reload).await {
error!("prefetch `mime` failed in watcher: {e}");
if let Err(e) = isolate::fetch("mime", reload).await {
error!("fetch `mime` failed in watcher: {e}");
}
}
}

View File

@ -7,7 +7,7 @@ use super::Tasks;
use crate::folder::Files;
impl Tasks {
pub fn prefetch_paged(&self, paged: &[File], mimetype: &HashMap<Url, String>) {
pub fn fetch_paged(&self, paged: &[File], mimetype: &HashMap<Url, String>) {
let mut loaded = self.scheduler.prework.loaded.lock();
let mut tasks: [Vec<_>; MAX_PREWORKERS as usize] = Default::default();
for f in paged {
@ -17,7 +17,7 @@ impl Tasks {
_ => false,
};
for p in PLUGIN.prefetchers(&f.url, mime, factors) {
for p in PLUGIN.fetchers(&f.url, mime, factors) {
match loaded.get_mut(&f.url) {
Some(n) if *n & (1 << p.id) != 0 => continue,
Some(n) => *n |= 1 << p.id,
@ -30,7 +30,7 @@ impl Tasks {
drop(loaded);
for (i, tasks) in tasks.into_iter().enumerate() {
if !tasks.is_empty() {
self.scheduler.prefetch_paged(&PLUGIN.prefetchers[i], tasks);
self.scheduler.fetch_paged(&PLUGIN.fetchers[i], tasks);
}
}
}
@ -58,7 +58,7 @@ impl Tasks {
}
}
self.prefetch_paged(affected, mimetype);
self.fetch_paged(affected, mimetype);
self.preload_paged(affected, mimetype);
}
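
The loaded map above keeps one bit per fetcher id for every URL, so a file is handed to a given fetcher at most once across repeated pagination and reloads. A rough, self-contained sketch of that bookkeeping, with plain String keys and u32 masks standing in for the real yazi types:

use std::collections::HashMap;

// Returns true when `fetcher_id` still needs to run for `url`, and marks it
// as scheduled so later passes skip it.
fn mark_and_filter(loaded: &mut HashMap<String, u32>, url: &str, fetcher_id: u8) -> bool {
    let mask = 1u32 << fetcher_id;
    let bits = loaded.entry(url.to_owned()).or_insert(0);
    if *bits & mask != 0 {
        return false; // this fetcher already ran (or was scheduled) for this file
    }
    *bits |= mask;
    true
}

fn main() {
    let mut loaded = HashMap::new();
    assert!(mark_and_filter(&mut loaded, "/tmp/a.png", 0));  // first pass: schedule
    assert!(!mark_and_filter(&mut loaded, "/tmp/a.png", 0)); // repeat: skip
    assert!(mark_and_filter(&mut loaded, "/tmp/a.png", 1));  // different fetcher: schedule
}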

View File

@ -9,7 +9,7 @@ local function match_mimetype(s)
end
end
function M:prefetch()
function M:fetch()
local urls = {}
for _, file in ipairs(self.files) do
urls[#urls + 1] = tostring(file.url)

View File

@ -5,7 +5,7 @@ use yazi_config::LAYOUT;
use super::slim_lua;
use crate::{bindings::{Cast, File}, elements::Rect, loader::LOADER};
pub async fn prefetch(name: &str, files: Vec<yazi_shared::fs::File>) -> mlua::Result<u8> {
pub async fn fetch(name: &str, files: Vec<yazi_shared::fs::File>) -> mlua::Result<u8> {
LOADER.ensure(name).await.into_lua_err()?;
let name = name.to_owned();
@ -26,7 +26,7 @@ pub async fn prefetch(name: &str, files: Vec<yazi_shared::fs::File>) -> mlua::Result<u8> {
plugin.raw_set("area", Rect::cast(&lua, LAYOUT.load().preview)?)?;
plugin.raw_set("files", files)?;
Handle::current().block_on(plugin.call_async_method("prefetch", ()))
Handle::current().block_on(plugin.call_async_method("fetch", ()))
})
.await
.into_lua_err()?

View File

@ -1,15 +1,15 @@
#![allow(clippy::module_inception)]
mod entry;
mod fetch;
mod isolate;
mod peek;
mod prefetch;
mod preload;
mod seek;
pub use entry::*;
pub use fetch::*;
pub use isolate::*;
pub use peek::*;
pub use prefetch::*;
pub use preload::*;
pub use seek::*;

View File

@ -1,6 +1,6 @@
use std::sync::Arc;
use yazi_config::plugin::{PrefetcherProps, PreloaderProps};
use yazi_config::plugin::{FetcherProps, PreloaderProps};
use yazi_shared::{fs::Url, Throttle};
#[derive(Debug)]
@ -23,7 +23,7 @@ impl PreworkOp {
#[derive(Clone, Debug)]
pub struct PreworkOpFetch {
pub id: usize,
pub plugin: PrefetcherProps,
pub plugin: FetcherProps,
pub targets: Vec<yazi_shared::fs::File>,
}

View File

@ -31,15 +31,15 @@ impl Prework {
match op {
PreworkOp::Fetch(task) => {
let urls: Vec<_> = task.targets.iter().map(|f| f.url()).collect();
let result = isolate::prefetch(&task.plugin.name, task.targets).await;
let result = isolate::fetch(&task.plugin.name, task.targets).await;
if let Err(e) = result {
self.fail(task.id, format!("Prefetch task failed:\n{e}"))?;
self.fail(task.id, format!("Fetch task failed:\n{e}"))?;
return Err(e.into());
};
let code = result.unwrap();
if code & 1 == 0 {
error!("Prefetch task `{}` returned {code}", task.plugin.name);
error!("Fetch task `{}` returned {code}", task.plugin.name);
}
if code >> 1 & 1 != 0 {
let mut loaded = self.loaded.lock();
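
The two checks above read the value returned by a fetcher plugin as a small bitfield: bit 0 reports success, and bit 1 asks the scheduler to remember the affected files in its loaded cache. That interpretation is inferred only from this hunk; a sketch:

// Field names are illustrative; only the two bit tests come from the code above.
struct FetchOutcome {
    succeeded: bool,       // bit 0: the fetcher reported success
    cache_as_loaded: bool, // bit 1: record these files so the fetcher isn't re-run
}

fn interpret(code: u8) -> FetchOutcome {
    FetchOutcome { succeeded: code & 1 != 0, cache_as_loaded: code >> 1 & 1 != 0 }
}

fn main() {
    let ok = interpret(0b11);
    assert!(ok.succeeded && ok.cache_as_loaded);
    assert!(!interpret(0).succeeded);
}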

View File

@ -4,7 +4,7 @@ use anyhow::Result;
use futures::{future::BoxFuture, FutureExt};
use parking_lot::Mutex;
use tokio::{fs, select, sync::{mpsc::{self, UnboundedReceiver}, oneshot}, task::JoinHandle};
use yazi_config::{open::Opener, plugin::{Prefetcher, Preloader}, TASKS};
use yazi_config::{open::Opener, plugin::{Fetcher, Preloader}, TASKS};
use yazi_dds::Pump;
use yazi_shared::{event::Data, fs::{unique_path, Url}, Throttle};
@ -218,13 +218,13 @@ impl Scheduler {
self.plugin.macro_(PluginOpEntry { id, name, args }).ok();
}
pub fn prefetch_paged(&self, prefetcher: &Prefetcher, targets: Vec<yazi_shared::fs::File>) {
pub fn fetch_paged(&self, fetcher: &Fetcher, targets: Vec<yazi_shared::fs::File>) {
let id = self.ongoing.lock().add(
TaskKind::Preload,
format!("Run prefetcher `{}` with {} target(s)", prefetcher.run.name, targets.len()),
format!("Run fetcher `{}` with {} target(s)", fetcher.run.name, targets.len()),
);
let plugin = prefetcher.into();
let plugin = fetcher.into();
let prework = self.prework.clone();
_ = self.micro.try_send(
async move {